Package gluon :: Module dal
[hide private]
[frames] | [no frames]

Source Code for Module gluon.dal

    1  #!/bin/env python 
    2  # -*- coding: utf-8 -*- 
    3   
    4  """ 
    5  This file is part of the web2py Web Framework 
    6  Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu> 
    7  License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html) 
    8   
    9  Thanks to 
   10      * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support 
   11      * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support 
   12      * Denes 
   13      * Chris Clark 
   14      * clach05 
   15      * Denes Lengyel 
   16      * and many others who have contributed to current and previous versions 
   17   
   18  This file contains the DAL support for many relational databases, 
   19  including: 
   20  - SQLite & SpatiaLite 
   21  - MySQL 
   22  - Postgres 
   23  - Firebird 
   24  - Oracle 
   25  - MS SQL 
   26  - DB2 
   27  - Interbase 
   28  - Ingres 
   29  - Informix (9+ and SE) 
   30  - SapDB (experimental) 
   31  - Cubrid (experimental) 
   32  - CouchDB (experimental) 
   33  - MongoDB (in progress) 
   34  - Google:nosql 
   35  - Google:sql 
   36  - Teradata 
   37  - IMAP (experimental) 
   38   
   39  Example of usage: 
   40   
   41  >>> # from dal import DAL, Field 
   42   
   43  ### create DAL connection (and create DB if it doesn't exist) 
   44  >>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'), 
   45  ... folder=None) 
   46   
   47  ### define a table 'person' (create/alter as necessary) 
   48  >>> person = db.define_table('person',Field('name','string')) 
   49   
   50  ### insert a record 
   51  >>> id = person.insert(name='James') 
   52   
   53  ### retrieve it by id 
   54  >>> james = person(id) 
   55   
   56  ### retrieve it by name 
   57  >>> james = person(name='James') 
   58   
   59  ### retrieve it by arbitrary query 
   60  >>> query = (person.name=='James') & (person.name.startswith('J')) 
   61  >>> james = db(query).select(person.ALL)[0] 
   62   
   63  ### update one record 
   64  >>> james.update_record(name='Jim') 
   65  <Row {'id': 1, 'name': 'Jim'}> 
   66   
   67  ### update multiple records by query 
   68  >>> db(person.name.like('J%')).update(name='James') 
   69  1 
   70   
   71  ### delete records by query 
   72  >>> db(person.name.lower() == 'jim').delete() 
   73  0 
   74   
   75  ### retrieve multiple records (rows) 
   76  >>> people = db(person).select(orderby=person.name, 
   77  ... groupby=person.name, limitby=(0,100)) 
   78   
   79  ### further filter them 
   80  >>> james = people.find(lambda row: row.name == 'James').first() 
   81  >>> print james.id, james.name 
   82  1 James 
   83   
   84  ### check aggregates 
   85  >>> counter = person.id.count() 
   86  >>> print db(person).select(counter).first()(counter) 
   87  1 
   88   
   89  ### delete one record 
   90  >>> james.delete_record() 
   91  1 
   92   
   93  ### delete (drop) entire database table 
   94  >>> person.drop() 
   95   
   96  Supported field types: 
   97  id string text boolean integer double decimal password upload 
   98  blob time date datetime 
   99   
  100  Supported DAL URI strings: 
  101  'sqlite://test.db' 
  102  'spatialite://test.db' 
  103  'sqlite:memory' 
  104  'spatialite:memory' 
  105  'jdbc:sqlite://test.db' 
  106  'mysql://root:none@localhost/test' 
  107  'postgres://mdipierro:password@localhost/test' 
  108  'postgres:psycopg2://mdipierro:password@localhost/test' 
  109  'postgres:pg8000://mdipierro:password@localhost/test' 
  110  'jdbc:postgres://mdipierro:none@localhost/test' 
  111  'mssql://web2py:none@A64X2/web2py_test' 
  112  'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings 
  113  'oracle://username:password@database' 
  114  'firebird://user:password@server:3050/database' 
  115  'db2://DSN=dsn;UID=user;PWD=pass' 
  116  'firebird://username:password@hostname/database' 
  117  'firebird_embedded://username:password@c://path' 
  118  'informix://user:password@server:3050/database' 
  119  'informixu://user:password@server:3050/database' # unicode informix 
  120  'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name' 
  121  'google:datastore' # for google app engine datastore 
  122  'google:sql' # for google app engine with sql (mysql compatible) 
  123  'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental 
  124  'imap://user:password@server:port' # experimental 
  125   
  126  For more info: 
  127  help(DAL) 
  128  help(Field) 
  129  """ 
  130   
###################################################################################
# this file only exposes DAL and Field
###################################################################################

__all__ = ['DAL', 'Field']

# upper bound for a single char column when the backend imposes a limit
MAXCHARLENGTH = 2**15 # not quite but reasonable default max char length
# default lengths per field type when the Field does not specify one
DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
# number of entries retained when recording query timings
TIMINGSSIZE = 100
# per-platform shared-library name of the SpatiaLite extension
SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }
# connection string used when DAL() is called without an explicit URI
DEFAULT_URI = 'sqlite://dummy.db'
  150   
  151  import re 
  152  import sys 
  153  import locale 
  154  import os 
  155  import types 
  156  import datetime 
  157  import threading 
  158  import time 
  159  import csv 
  160  import cgi 
  161  import copy 
  162  import socket 
  163  import logging 
  164  import base64 
  165  import shutil 
  166  import marshal 
  167  import decimal 
  168  import struct 
  169  import urllib 
  170  import hashlib 
  171  import uuid 
  172  import glob 
  173  import traceback 
  174  import platform 
  175   
  176  PYTHON_VERSION = sys.version_info[0] 
  177  if PYTHON_VERSION == 2: 
  178      import cPickle as pickle 
  179      import cStringIO as StringIO 
  180      import copy_reg as copyreg 
  181      hashlib_md5 = hashlib.md5 
  182      bytes, unicode = str, unicode 
  183  else: 
  184      import pickle 
  185      from io import StringIO as StringIO 
  186      import copyreg 
  187      long = int 
  188      hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8')) 
  189      bytes, unicode = bytes, str 
  190   
  191  CALLABLETYPES = (types.LambdaType, types.FunctionType, 
  192                   types.BuiltinFunctionType, 
  193                   types.MethodType, types.BuiltinMethodType) 
  194   
  195  TABLE_ARGS = set( 
  196      ('migrate','primarykey','fake_migrate','format','redefine', 
  197       'singular','plural','trigger_name','sequence_name', 
  198       'common_filter','polymodel','table_class','on_define',)) 
  199   
  200  SELECT_ARGS = set( 
  201      ('orderby', 'groupby', 'limitby','required', 'cache', 'left', 
  202       'distinct', 'having', 'join','for_update', 'processor','cacheable')) 
  203   
  204  ogetattr = object.__getattribute__ 
  205  osetattr = object.__setattr__ 
  206  exists = os.path.exists 
  207  pjoin = os.path.join 
  208   
  209  ################################################################################### 
  210  # following checks allow the use of dal without web2py, as a standalone module 
  211  ################################################################################### 
  212  try: 
  213      from utils import web2py_uuid 
  214  except (ImportError, SystemError): 
  215      import uuid 
    # Fallback used when gluon.utils is not importable (standalone DAL usage):
    # same interface as gluon.utils.web2py_uuid, returns a random UUID4 string.
    def web2py_uuid(): return str(uuid.uuid4())
217 218 try: 219 import portalocker 220 have_portalocker = True 221 except ImportError: 222 have_portalocker = False 223 224 try: 225 import serializers 226 have_serializers = True 227 except ImportError: 228 have_serializers = False 229 try: 230 import json as simplejson 231 except ImportError: 232 try: 233 import gluon.contrib.simplejson as simplejson 234 except ImportError: 235 simplejson = None 236 237 try: 238 import validators 239 have_validators = True 240 except (ImportError, SyntaxError): 241 have_validators = False 242 243 LOGGER = logging.getLogger("web2py.dal") 244 DEFAULT = lambda:0 245 246 GLOBAL_LOCKER = threading.RLock() 247 THREAD_LOCAL = threading.local() 248 249 # internal representation of tables with field 250 # <table>.<field>, tables and fields may only be [a-zA-Z0-9_] 251 252 REGEX_TYPE = re.compile('^([\w\_\:]+)') 253 REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*') 254 REGEX_W = re.compile('^\w+$') 255 REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$') 256 REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$') 257 REGEX_CLEANUP_FN = re.compile('[\'"\s;]+') 258 REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)') 259 REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$') 260 REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)") 261 REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')') 262 REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$') 263 REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$') 264 REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$') 265 REGEX_QUOTES = re.compile("'[^']*'") 266 REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$') 267 REGEX_PASSWORD = re.compile('\://([^:@]*)\:') 268 REGEX_NOPASSWD = 
re.compile('(?<=\:)([^:@/]+)(?=@.+)') 269 270 # list of drivers will be built on the fly 271 # and lists only what is available 272 DRIVERS = [] 273 274 try: 275 from new import classobj 276 from google.appengine.ext import db as gae 277 from google.appengine.api import namespace_manager, rdbms 278 from google.appengine.api.datastore_types import Key ### for belongs on ID 279 from google.appengine.ext.db.polymodel import PolyModel 280 DRIVERS.append('google') 281 except ImportError: 282 pass 283 284 if not 'google' in DRIVERS: 285 286 try: 287 from pysqlite2 import dbapi2 as sqlite2 288 DRIVERS.append('SQLite(sqlite2)') 289 except ImportError: 290 LOGGER.debug('no SQLite drivers pysqlite2.dbapi2') 291 292 try: 293 from sqlite3 import dbapi2 as sqlite3 294 DRIVERS.append('SQLite(sqlite3)') 295 except ImportError: 296 LOGGER.debug('no SQLite drivers sqlite3') 297 298 try: 299 # first try contrib driver, then from site-packages (if installed) 300 try: 301 import contrib.pymysql as pymysql 302 # monkeypatch pymysql because they havent fixed the bug: 303 # https://github.com/petehunt/PyMySQL/issues/86 304 pymysql.ESCAPE_REGEX = re.compile("'") 305 pymysql.ESCAPE_MAP = {"'": "''"} 306 # end monkeypatch 307 except ImportError: 308 import pymysql 309 DRIVERS.append('MySQL(pymysql)') 310 except ImportError: 311 LOGGER.debug('no MySQL driver pymysql') 312 313 try: 314 import MySQLdb 315 DRIVERS.append('MySQL(MySQLdb)') 316 except ImportError: 317 LOGGER.debug('no MySQL driver MySQLDB') 318 319 320 try: 321 import psycopg2 322 from psycopg2.extensions import adapt as psycopg2_adapt 323 DRIVERS.append('PostgreSQL(psycopg2)') 324 except ImportError: 325 LOGGER.debug('no PostgreSQL driver psycopg2') 326 327 try: 328 # first try contrib driver, then from site-packages (if installed) 329 try: 330 import contrib.pg8000.dbapi as pg8000 331 except ImportError: 332 import pg8000.dbapi as pg8000 333 DRIVERS.append('PostgreSQL(pg8000)') 334 except ImportError: 335 LOGGER.debug('no 
PostgreSQL driver pg8000') 336 337 try: 338 import cx_Oracle 339 DRIVERS.append('Oracle(cx_Oracle)') 340 except ImportError: 341 LOGGER.debug('no Oracle driver cx_Oracle') 342 343 try: 344 try: 345 import pyodbc 346 except ImportError: 347 try: 348 import contrib.pypyodbc as pyodbc 349 except Exception, e: 350 raise ImportError(str(e)) 351 DRIVERS.append('MSSQL(pyodbc)') 352 DRIVERS.append('DB2(pyodbc)') 353 DRIVERS.append('Teradata(pyodbc)') 354 DRIVERS.append('Ingres(pyodbc)') 355 except ImportError: 356 LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc') 357 358 try: 359 import Sybase 360 DRIVERS.append('Sybase(Sybase)') 361 except ImportError: 362 LOGGER.debug('no Sybase driver') 363 364 try: 365 import kinterbasdb 366 DRIVERS.append('Interbase(kinterbasdb)') 367 DRIVERS.append('Firebird(kinterbasdb)') 368 except ImportError: 369 LOGGER.debug('no Firebird/Interbase driver kinterbasdb') 370 371 try: 372 import fdb 373 DRIVERS.append('Firebird(fdb)') 374 except ImportError: 375 LOGGER.debug('no Firebird driver fdb') 376 ##### 377 try: 378 import firebirdsql 379 DRIVERS.append('Firebird(firebirdsql)') 380 except ImportError: 381 LOGGER.debug('no Firebird driver firebirdsql') 382 383 try: 384 import informixdb 385 DRIVERS.append('Informix(informixdb)') 386 LOGGER.warning('Informix support is experimental') 387 except ImportError: 388 LOGGER.debug('no Informix driver informixdb') 389 390 try: 391 import sapdb 392 DRIVERS.append('SQL(sapdb)') 393 LOGGER.warning('SAPDB support is experimental') 394 except ImportError: 395 LOGGER.debug('no SAP driver sapdb') 396 397 try: 398 import cubriddb 399 DRIVERS.append('Cubrid(cubriddb)') 400 LOGGER.warning('Cubrid support is experimental') 401 except ImportError: 402 LOGGER.debug('no Cubrid driver cubriddb') 403 404 try: 405 from com.ziclix.python.sql import zxJDBC 406 import java.sql 407 # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/ 408 from org.sqlite import JDBC # required by java.sql; ensure we have 
it 409 zxJDBC_sqlite = java.sql.DriverManager 410 DRIVERS.append('PostgreSQL(zxJDBC)') 411 DRIVERS.append('SQLite(zxJDBC)') 412 LOGGER.warning('zxJDBC support is experimental') 413 is_jdbc = True 414 except ImportError: 415 LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC') 416 is_jdbc = False 417 418 try: 419 import couchdb 420 DRIVERS.append('CouchDB(couchdb)') 421 except ImportError: 422 LOGGER.debug('no Couchdb driver couchdb') 423 424 try: 425 import pymongo 426 DRIVERS.append('MongoDB(pymongo)') 427 except: 428 LOGGER.debug('no MongoDB driver pymongo') 429 430 try: 431 import imaplib 432 DRIVERS.append('IMAP(imaplib)') 433 except: 434 LOGGER.debug('no IMAP driver imaplib') 435 436 PLURALIZE_RULES = [ 437 (re.compile('child$'), re.compile('child$'), 'children'), 438 (re.compile('oot$'), re.compile('oot$'), 'eet'), 439 (re.compile('ooth$'), re.compile('ooth$'), 'eeth'), 440 (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'), 441 (re.compile('sis$'), re.compile('sis$'), 'ses'), 442 (re.compile('man$'), re.compile('man$'), 'men'), 443 (re.compile('ife$'), re.compile('ife$'), 'ives'), 444 (re.compile('eau$'), re.compile('eau$'), 'eaux'), 445 (re.compile('lf$'), re.compile('lf$'), 'lves'), 446 (re.compile('[sxz]$'), re.compile('$'), 'es'), 447 (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'), 448 (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'), 449 (re.compile('$'), re.compile('$'), 's'), 450 ]
def pluralize(singular, rules=PLURALIZE_RULES):
    """Return the plural of *singular* using the first matching rule.

    Each rule is a (search_regex, sub_regex, replacement) triple; the
    default rule set ends with a catch-all that appends 's'.
    """
    for re_search, re_sub, replacement in rules:
        # only accept a non-empty substitution result
        candidate = re_search.search(singular) and re_sub.sub(replacement, singular)
        if candidate:
            return candidate
def hide_password(uri):
    """Return *uri* with the password portion masked as '******'."""
    masked = REGEX_NOPASSWD.sub('******', uri)
    return masked
def OR(a, b):
    """Combine two operands with ``|`` (for DAL queries this is logical OR)."""
    combined = a | b
    return combined
def AND(a, b):
    """Combine two operands with ``&`` (for DAL queries this is logical AND)."""
    combined = a & b
    return combined
def IDENTITY(x):
    """Return the argument unchanged (used as a default transformation)."""
    return x
def varquote_aux(name, quotestr='%s'):
    """Quote *name* using *quotestr* unless it is a plain \\w+ identifier."""
    if REGEX_W.match(name):
        return name
    return quotestr % name
471 472 if 'google' in DRIVERS: 473 474 is_jdbc = False
475 476 - class GAEDecimalProperty(gae.Property):
477 """ 478 GAE decimal implementation 479 """ 480 data_type = decimal.Decimal 481
482 - def __init__(self, precision, scale, **kwargs):
483 super(GAEDecimalProperty, self).__init__(self, **kwargs) 484 d = '1.' 485 for x in range(scale): 486 d += '0' 487 self.round = decimal.Decimal(d)
488
489 - def get_value_for_datastore(self, model_instance):
490 value = super(GAEDecimalProperty, self)\ 491 .get_value_for_datastore(model_instance) 492 if value is None or value == '': 493 return None 494 else: 495 return str(value)
496
497 - def make_value_from_datastore(self, value):
498 if value is None or value == '': 499 return None 500 else: 501 return decimal.Decimal(value).quantize(self.round)
502
503 - def validate(self, value):
504 value = super(GAEDecimalProperty, self).validate(value) 505 if value is None or isinstance(value, decimal.Decimal): 506 return value 507 elif isinstance(value, basestring): 508 return decimal.Decimal(value) 509 raise gae.BadValueError("Property %s must be a Decimal or string."\ 510 % self.name)
511
###################################################################################
# class that handles connection pooling (all adapters are derived from this one)
###################################################################################

class ConnectionPool(object):
    """Mixin giving every adapter thread-aware connection pooling.

    Idle connections are kept in the class-level POOLS dict, keyed by
    connection URI and guarded by GLOBAL_LOCKER.
    """

    # uri -> list of idle pooled connections, shared by all adapter instances
    POOLS = {}
    # when True, a pooled connection is probed with 'SELECT 1;' before reuse
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        # remember the working folder for the current thread
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        """Finish the unit of work and release this adapter's connection.

        action: name of a method on self ('commit'/'rollback') or a
            callable invoked with the adapter; falsy skips this step.
        really: close the underlying connection; forced to False when the
            connection is recycled into a non-full pool instead.
        """
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            # close_connection is provided by the concrete adapter/driver
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        # forget every DAL instance registered by this thread
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder. it has to be there """
        self.folder = getattr(THREAD_LOCAL,'folder','')

        # Creating the folder if it does not exist
        # (deliberately disabled: the leading `False` keeps this a no-op)
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters"""
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self,'connection',None) != None:
            return
        if f is None:
            # self.connector is set by the concrete adapter
            f = self.connector

        if not self.pool_size:
            # pooling disabled: always open a fresh connection
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    # try to reuse an idle pooled connection
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        # connection was closed by the server: discard it
                        # and loop to try the next pooled connection
                        pass
                else:
                    # pool empty: make a brand new connection
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
###################################################################################
# this is a generic adapter that does nothing; all others are derived from this one
###################################################################################

class BaseAdapter(ConnectionPool):
    # capability flags and defaults; concrete adapters override as needed
    native_json = False
    driver = None
    driver_name = None
    drivers = () # list of drivers from which to pick
    connection = None
    maxcharlength = MAXCHARLENGTH
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True

    # literals used to store booleans in a CHAR(1) column
    TRUE = 'T'
    FALSE = 'F'
    # separator between date and time in datetime literals
    T_SEP = ' '
    # DAL field type -> backend column type; %(...)s slots filled per field
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float':'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when DAL(...bigint_id=True) and replace 'id','reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
663 - def id_query(self, table):
664 return table._id != None
665
666 - def adapt(self, obj):
667 return "'%s'" % obj.replace("'", "''")
668
669 - def smart_adapt(self, obj):
670 if isinstance(obj,(int,float)): 671 return str(obj) 672 return self.adapt(str(obj))
673
674 - def integrity_error(self):
675 return self.driver.IntegrityError
676
677 - def operational_error(self):
678 return self.driver.OperationalError
679
680 - def file_exists(self, filename):
681 """ 682 to be used ONLY for files that on GAE may not be on filesystem 683 """ 684 return exists(filename)
685
686 - def file_open(self, filename, mode='rb', lock=True):
687 """ 688 to be used ONLY for files that on GAE may not be on filesystem 689 """ 690 if have_portalocker and lock: 691 fileobj = portalocker.LockedFile(filename,mode) 692 else: 693 fileobj = open(filename,mode) 694 return fileobj
695
696 - def file_close(self, fileobj):
697 """ 698 to be used ONLY for files that on GAE may not be on filesystem 699 """ 700 if fileobj: 701 fileobj.close()
702
703 - def file_delete(self, filename):
704 os.unlink(filename)
705
706 - def find_driver(self,adapter_args,uri=None):
707 if getattr(self,'driver',None) != None: 708 return 709 drivers_available = [driver for driver in self.drivers 710 if driver in globals()] 711 if uri: 712 items = uri.split('://',1)[0].split(':') 713 request_driver = items[1] if len(items)>1 else None 714 else: 715 request_driver = None 716 request_driver = request_driver or adapter_args.get('driver') 717 if request_driver: 718 if request_driver in drivers_available: 719 self.driver_name = request_driver 720 self.driver = globals().get(request_driver) 721 else: 722 raise RuntimeError("driver %s not available" % request_driver) 723 elif drivers_available: 724 self.driver_name = drivers_available[0] 725 self.driver = globals().get(self.driver_name) 726 else: 727 raise RuntimeError("no driver available %s" % str(self.drivers))
728 729
730 - def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8', 731 credential_decoder=IDENTITY, driver_args={}, 732 adapter_args={},do_connect=True, after_connection=None):
733 self.db = db 734 self.dbengine = "None" 735 self.uri = uri 736 self.pool_size = pool_size 737 self.folder = folder 738 self.db_codec = db_codec 739 self._after_connection = after_connection 740 class Dummy(object): 741 lastrowid = 1 742 def __getattr__(self, value): 743 return lambda *a, **b: []
744 self.connection = Dummy() 745 self.cursor = Dummy() 746
747 - def sequence_name(self,tablename):
748 return '%s_sequence' % tablename
749
750 - def trigger_name(self,tablename):
751 return '%s_sequence' % tablename
752
753 - def varquote(self,name):
754 return name
755
    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        """Build the CREATE TABLE SQL for *table* and drive migrations.

        Returns the CREATE TABLE statement. When migrate is falsy the SQL
        is only returned; otherwise the field metadata is pickled into a
        .table file (table._dbt) and migrate_table() is invoked when the
        stored definitions differ from the current ones. fake_migrate logs
        the migration without touching the database. polymodel is accepted
        for API compatibility and unused in this base implementation.
        """
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        # sql_fields drives migrations; sql_fields_aux drives CREATE TABLE
        sql_fields = {}
        sql_fields_aux = {}
        # table-level foreign keys: rtablename -> {rfieldname: field_name}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                # 'reference <table>[.<field>]'; '.' means self-reference
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be PK reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                    constraint_name = constraint_name, # should be quoted
                                    foreign_key = '%s (%s)' % (rtablename,
                                                               rfieldname),
                                    table_name = tablename,
                                    field_name = field_name,
                                    on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else: #make a guess
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                # 'decimal(p,s)' -> precision/scale
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2 # GIS.dimension ???
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                    (field_type, field_name))
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        # append table-level (multicolumn) foreign key constraints
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                table_name = tablename,
                field_name=', '.join(fkeys),
                foreign_table = rtablename,
                foreign_key = ', '.join(pkeys),
                on_delete_action = field.ondelete)

        if hasattr(table,'_primarykey'):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)),other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        # locate the folder where the .table migration files live
        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            # in-memory databases never persist migration metadata
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if table._dbt:
            table._loggername = pjoin(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            # first creation of the table (no metadata file yet)
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            # table metadata exists: compare and migrate if it changed
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
def migrate_table(
    self,
    table,
    sql_fields,
    sql_fields_old,
    sql_fields_aux,
    logfile,
    fake_migrate=False,
    ):
    """Reconcile the live table with the saved migration metadata.

    Compares `sql_fields` (desired schema) with `sql_fields_old` (schema
    recorded in the .table file) and executes the ALTER TABLE statements
    needed to add, drop or retype columns, rewriting the .table metadata
    as it goes.  With fake_migrate=True statements are only logged and
    the metadata file is rewritten without touching the database.
    """
    db = table._db
    db._migrated.append(table._tablename)
    tablename = table._tablename
    def fix(item):
        # normalize legacy metadata entries (plain sql string) to dicts
        k,v=item
        if not isinstance(v,dict):
            v=dict(type='unkown',sql=v)
        return k.lower(),v
    # make sure all field names are lower case to avoid
    # spurious migrations caused only by a case change
    sql_fields = dict(map(fix,sql_fields.iteritems()))
    sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
    sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
    if db._debug:
        logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

    # union of new and old field names, preserving new-field order
    keys = sql_fields.keys()
    for key in sql_fields_old:
        if not key in keys:
            keys.append(key)
    new_add = self.concat_add(tablename)

    metadata_change = False
    sql_fields_current = copy.copy(sql_fields_old)
    for key in keys:
        query = None
        if not key in sql_fields_old:
            # field is new: ADD COLUMN (PostGIS geometry uses its own SQL)
            sql_fields_current[key] = sql_fields[key]
            if self.dbengine in ('postgres',) and \
               sql_fields[key]['type'].startswith('geometry'):
                # 'sql' == ftype in sql
                query = [ sql_fields[key]['sql'] ]
            else:
                query = ['ALTER TABLE %s ADD %s %s;' % \
                         (tablename, key,
                          sql_fields_aux[key]['sql'].replace(', ', new_add))]
            metadata_change = True
        elif self.dbengine in ('sqlite', 'spatialite'):
            # sqlite cannot ALTER column types; only update the metadata
            if key in sql_fields:
                sql_fields_current[key] = sql_fields[key]
            metadata_change = True
        elif not key in sql_fields:
            # field was removed: DROP COLUMN (dialect-specific syntax)
            del sql_fields_current[key]
            ftype = sql_fields_old[key]['type']
            if self.dbengine in ('postgres',) \
               and ftype.startswith('geometry'):
                geotype, parms = ftype[:-1].split('(')
                schema = parms.split(',')[0]
                query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" % dict(schema=schema, table=tablename, field=key,) ]
            elif not self.dbengine in ('firebird',):
                query = ['ALTER TABLE %s DROP COLUMN %s;'
                         % (tablename, key)]
            else:
                query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
            metadata_change = True
        elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
              and not (key in table.fields and
                       isinstance(table[key].type, SQLCustomType)) \
              and not sql_fields[key]['type'].startswith('reference')\
              and not sql_fields[key]['type'].startswith('double')\
              and not sql_fields[key]['type'].startswith('id'):
            # type changed: copy data through a __tmp column so values survive
            sql_fields_current[key] = sql_fields[key]
            t = tablename
            tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
            if not self.dbengine in ('firebird',):
                query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                         'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                         'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                         'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
            else:
                query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                         'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                         'ALTER TABLE %s DROP %s;' % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                         'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
            metadata_change = True
        elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
            # only the logical type changed; no SQL needed, just metadata
            sql_fields_current[key] = sql_fields[key]
            metadata_change = True

        if query:
            logfile.write('timestamp: %s\n'
                          % datetime.datetime.today().isoformat())
            db['_lastsql'] = '\n'.join(query)
            for sub_query in query:
                logfile.write(sub_query + '\n')
                if not fake_migrate:
                    self.execute(sub_query)
                    # Caveat: mysql, oracle and firebird do not allow multiple alter table
                    # in one transaction so we must commit partial transactions and
                    # update table._dbt after alter table.
                    if db._adapter.commit_on_alter_table:
                        db.commit()
                        tfile = self.file_open(table._dbt, 'w')
                        pickle.dump(sql_fields_current, tfile)
                        self.file_close(tfile)
                        logfile.write('success!\n')
                else:
                    logfile.write('faked!\n')
        elif metadata_change:
            tfile = self.file_open(table._dbt, 'w')
            pickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)

    if metadata_change and \
            not (query and self.dbengine in ('mysql','oracle','firebird')):
        db.commit()
        tfile = self.file_open(table._dbt, 'w')
        pickle.dump(sql_fields_current, tfile)
        self.file_close(tfile)
def LOWER(self, first):
    """SQL LOWER() applied to the expanded expression."""
    inner = self.expand(first)
    return 'LOWER(%s)' % inner
1115
def UPPER(self, first):
    """SQL UPPER() applied to the expanded expression."""
    inner = self.expand(first)
    return 'UPPER(%s)' % inner
1118
def COUNT(self, first, distinct=None):
    """COUNT(expr), or COUNT(DISTINCT expr) when `distinct` is truthy."""
    inner = self.expand(first)
    if distinct:
        return 'COUNT(DISTINCT %s)' % inner
    return 'COUNT(%s)' % inner
1122
def EXTRACT(self, first, what):
    """SQL EXTRACT(<what> FROM expr) for date/time components."""
    inner = self.expand(first)
    return "EXTRACT(%s FROM %s)" % (what, inner)
1125
def EPOCH(self, first):
    """Seconds since the Unix epoch, via EXTRACT(epoch FROM expr)."""
    component = 'epoch'
    return self.EXTRACT(first, component)
1128
def AGGREGATE(self, first, what):
    """Generic aggregate call: <what>(expr), e.g. SUM/MIN/MAX/AVG."""
    inner = self.expand(first)
    return "%s(%s)" % (what, inner)
1131
def JOIN(self):
    """Keyword used for inner joins in this dialect."""
    keyword = 'JOIN'
    return keyword
1134
def LEFT_JOIN(self):
    """Keyword used for left outer joins in this dialect."""
    keyword = 'LEFT JOIN'
    return keyword
1137
def RANDOM(self):
    """Dialect's random-ordering function for ORDER BY <random>."""
    func = 'Random()'
    return func
1140
def NOT_NULL(self, default, field_type):
    """NOT NULL clause carrying the represented default value."""
    rendered = self.represent(default, field_type)
    return 'NOT NULL DEFAULT %s' % rendered
1143
def COALESCE(self, first, second):
    """COALESCE(first, *second); `second` is a sequence of fallbacks."""
    parts = [self.expand(first)]
    for fallback in second:
        parts.append(self.expand(fallback))
    return 'COALESCE(%s)' % ','.join(parts)
1147
def COALESCE_ZERO(self, first):
    """COALESCE(expr,0): treat NULL as zero in aggregates."""
    inner = self.expand(first)
    return 'COALESCE(%s,0)' % inner
1150
def RAW(self, first):
    """Pass `first` through verbatim as raw SQL text."""
    raw_sql = first
    return raw_sql
1153
def ALLOW_NULL(self):
    """Clause for nullable columns; empty in the base dialect."""
    clause = ''
    return clause
1156
def SUBSTRING(self, field, parameters):
    """SUBSTR(field, start, length); parameters is (start, length)."""
    start, length = parameters[0], parameters[1]
    return 'SUBSTR(%s,%s,%s)' % (self.expand(field), start, length)
1159
def PRIMARY_KEY(self, key):
    """PRIMARY KEY clause over an already-joined column list string."""
    return 'PRIMARY KEY(%s)' % key
1162
1163 - def _drop(self, table, mode):
1164 return ['DROP TABLE %s;' % table]
1165
def drop(self, table, mode=''):
    """Drop `table` from the database and purge it from the DAL.

    Logs executed statements to the table's sql.log when a migration
    file exists, removes the in-memory table and its references, and
    deletes the .table metadata file.  `mode` is forwarded to _drop()
    for backend extras (e.g. CASCADE).
    """
    db = table._db
    if table._dbt:
        logfile = self.file_open(table._loggername, 'a')
    queries = self._drop(table, mode)
    for query in queries:
        if table._dbt:
            logfile.write(query + '\n')
        self.execute(query)
    db.commit()
    # remove every in-memory trace of the table from the DAL instance
    del db[table._tablename]
    del db.tables[db.tables.index(table._tablename)]
    db._remove_references_to(table)
    if table._dbt:
        self.file_delete(table._dbt)
        logfile.write('success!\n')
1182
1183 - def _insert(self, table, fields):
1184 if fields: 1185 keys = ','.join(f.name for f, v in fields) 1186 values = ','.join(self.expand(v, f.type) for f, v in fields) 1187 return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values) 1188 else: 1189 return self._insert_empty(table)
1190
1191 - def _insert_empty(self, table):
1192 return 'INSERT INTO %s DEFAULT VALUES;' % table
1193
def insert(self, table, fields):
    """Insert one record; `fields` is a list of (Field, value) pairs.

    Returns None on an integrity error, a dict of key values for a
    keyed table (one with _primarykey), or a Reference wrapping the
    new auto-id.  Non-integer row ids (e.g. NoSQL keys) pass through.
    """
    query = self._insert(table,fields)
    try:
        self.execute(query)
    except Exception:
        # py2/py3-compatible way to grab the active exception object
        e = sys.exc_info()[1]
        if isinstance(e,self.integrity_error_class()):
            return None
        raise e
    if hasattr(table,'_primarykey'):
        # keyed table: no auto id; report the primary-key values used
        return dict([(k[0].name, k[1]) for k in fields \
                         if k[0].name in table._primarykey])
    id = self.lastrowid(table)
    if not isinstance(id,int):
        return id
    rid = Reference(id)
    (rid._table, rid._record) = (table, None)
    return rid
1212
def bulk_insert(self, table, items):
    """Insert each item in turn; returns the list of per-row results."""
    results = []
    for item in items:
        results.append(self.insert(table, item))
    return results
1215
def NOT(self, first):
    """Logical negation of the expanded expression."""
    inner = self.expand(first)
    return '(NOT %s)' % inner
1218
def AND(self, first, second):
    """Conjunction of two expanded expressions."""
    lhs = self.expand(first)
    rhs = self.expand(second)
    return '(%s AND %s)' % (lhs, rhs)
1221
def OR(self, first, second):
    """Disjunction of two expanded expressions."""
    lhs = self.expand(first)
    rhs = self.expand(second)
    return '(%s OR %s)' % (lhs, rhs)
1224
def BELONGS(self, first, second):
    """IN operator; `second` may be a nested-select string or a sequence."""
    if isinstance(second, str):
        # a nested select arrives with a trailing ';' that must go
        return '(%s IN (%s))' % (self.expand(first), second[:-1])
    if not second:
        # empty sequence: no row can ever match
        return '(1=0)'
    items = ','.join(self.expand(item, first.type) for item in second)
    return '(%s IN (%s))' % (self.expand(first), items)
1232
def REGEXP(self, first, second):
    """Regular-expression match; concrete backends must override."""
    raise NotImplementedError
1236
def LIKE(self, first, second):
    """Case-sensitive LIKE; concrete backends must override."""
    raise NotImplementedError
1240
def ILIKE(self, first, second):
    """Case-insensitive like (plain LIKE in the base adapter)."""
    lhs = self.expand(first)
    rhs = self.expand(second, 'string')
    return '(%s LIKE %s)' % (lhs, rhs)
1245
def STARTSWITH(self, first, second):
    """Prefix match via LIKE '<second>%'."""
    pattern = self.expand(second + '%', 'string')
    return '(%s LIKE %s)' % (self.expand(first), pattern)
1249
def ENDSWITH(self, first, second):
    """Suffix match via LIKE '%<second>'."""
    pattern = self.expand('%' + second, 'string')
    return '(%s LIKE %s)' % (self.expand(first), pattern)
1253
def CONTAINS(self, first, second, case_sensitive=False):
    """Substring / list-membership test.

    An Expression needle uses INSTR; otherwise a LIKE pattern is built,
    wrapping list: fields in '|' bars.  '%' in the needle is escaped as
    '%%' (and '|' doubled for list fields).
    """
    if isinstance(second, Expression):
        field = self.expand(first)
        expr = self.expand(second, 'string')
        if first.type.startswith('list:'):
            expr = 'CONCAT("|", %s, "|")' % expr
        elif not first.type in ('string', 'text', 'json'):
            raise RuntimeError("Expression Not Supported")
        return 'INSTR(%s,%s)' % (field, expr)
    if first.type in ('string', 'text', 'json'):
        needle = '%' + str(second).replace('%', '%%') + '%'
    elif first.type.startswith('list:'):
        needle = '%|' + str(second).replace('|', '||').replace('%', '%%') + '|%'
    else:
        raise RuntimeError("Expression Not Supported")
    op = self.LIKE if case_sensitive else self.ILIKE
    return op(first, needle)
1272
def EQ(self, first, second=None):
    """Equality; comparing against None renders as IS NULL."""
    lhs = self.expand(first)
    if second is None:
        return '(%s IS NULL)' % lhs
    return '(%s = %s)' % (lhs, self.expand(second, first.type))
1278
def NE(self, first, second=None):
    """Inequality; comparing against None renders as IS NOT NULL."""
    lhs = self.expand(first)
    if second is None:
        return '(%s IS NOT NULL)' % lhs
    return '(%s <> %s)' % (lhs, self.expand(second, first.type))
1284
def LT(self, first, second=None):
    """Less-than; comparing against None is an error."""
    if second is None:
        raise RuntimeError("Cannot compare %s < None" % first)
    return '(%s < %s)' % (self.expand(first),
                          self.expand(second, first.type))
1290
def LE(self, first, second=None):
    """Less-than-or-equal; comparing against None is an error."""
    if second is None:
        raise RuntimeError("Cannot compare %s <= None" % first)
    return '(%s <= %s)' % (self.expand(first),
                           self.expand(second, first.type))
1296
def GT(self, first, second=None):
    """Greater-than; comparing against None is an error."""
    if second is None:
        raise RuntimeError("Cannot compare %s > None" % first)
    return '(%s > %s)' % (self.expand(first),
                          self.expand(second, first.type))
1302
def GE(self, first, second=None):
    """Greater-than-or-equal; comparing against None is an error."""
    if second is None:
        raise RuntimeError("Cannot compare %s >= None" % first)
    return '(%s >= %s)' % (self.expand(first),
                           self.expand(second, first.type))
1308
def ADD(self, first, second):
    """Addition (or string concat); right side typed by the left operand."""
    lhs = self.expand(first)
    rhs = self.expand(second, first.type)
    return '(%s + %s)' % (lhs, rhs)
1312
def SUB(self, first, second):
    """Subtraction; right side typed by the left operand."""
    lhs = self.expand(first)
    rhs = self.expand(second, first.type)
    return '(%s - %s)' % (lhs, rhs)
1316
def MUL(self, first, second):
    """Multiplication; right side typed by the left operand."""
    lhs = self.expand(first)
    rhs = self.expand(second, first.type)
    return '(%s * %s)' % (lhs, rhs)
1320
def DIV(self, first, second):
    """Division; right side typed by the left operand."""
    lhs = self.expand(first)
    rhs = self.expand(second, first.type)
    return '(%s / %s)' % (lhs, rhs)
1324
def MOD(self, first, second):
    """Modulo; note the literal %% escaping in the format template."""
    lhs = self.expand(first)
    rhs = self.expand(second, first.type)
    return '(%s %% %s)' % (lhs, rhs)
1328
def AS(self, first, second):
    """Column alias: <expr> AS <name>."""
    inner = self.expand(first)
    return '%s AS %s' % (inner, second)
1331
def ON(self, first, second):
    """JOIN ... ON clause; common filters are applied to the condition."""
    if use_common_filters(second):
        second = self.common_filter(second, [first._tablename])
    joined = self.expand(first)
    condition = self.expand(second)
    return '%s ON %s' % (joined, condition)
1336
def INVERT(self, first):
    """Descending-sort marker for ORDER BY."""
    inner = self.expand(first)
    return '%s DESC' % inner
1339
def COMMA(self, first, second):
    """Comma-join two expanded expressions (field lists)."""
    lhs = self.expand(first)
    rhs = self.expand(second)
    return '%s, %s' % (lhs, rhs)
1342
def expand(self, expression, field_type=None):
    """Recursively render a Field/Expression/Query (or literal) as SQL.

    Operator nodes dispatch back into the adapter's SQL-building
    methods; raw literals are routed through represent() when a
    field_type is supplied.
    """
    if isinstance(expression, Field):
        return '%s.%s' % (expression.tablename, expression.name)
    elif isinstance(expression, (Expression, Query)):
        first = expression.first
        second = expression.second
        op = expression.op
        optional_args = expression.optional_args or {}
        if not second is None:
            return op(first, second, **optional_args)
        elif not first is None:
            return op(first,**optional_args)
        elif isinstance(op, str):
            # raw SQL snippet stored as op; strip a trailing ';'
            if op.endswith(';'):
                op=op[:-1]
            return '(%s)' % op
        else:
            return op()
    elif field_type:
        return str(self.represent(expression,field_type))
    elif isinstance(expression,(list,tuple)):
        return ','.join(self.represent(item,field_type) \
                            for item in expression)
    elif isinstance(expression, bool):
        return '1' if expression else '0'
    else:
        return str(expression)
1370
def alias(self, table, alias):
    """
    Given a table object, makes a new table object
    with alias name.  The copy keeps the original name in _ot, gets
    its own Field copies re-pointed at the alias, and is registered
    on table._db under the alias.
    """
    other = copy.copy(table)
    other['_ot'] = other._tablename
    other['ALL'] = SQLALL(other)
    other['_tablename'] = alias
    for fieldname in other.fields:
        # each field must be copied so the alias does not share state
        other[fieldname] = copy.copy(other[fieldname])
        other[fieldname]._tablename = alias
        other[fieldname].tablename = alias
        other[fieldname].table = other
    table._db[alias] = other
    return other
1387
1388 - def _truncate(self, table, mode=''):
1389 tablename = table._tablename 1390 return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]
1391
def truncate(self, table, mode= ' '):
    """TRUNCATE `table`, logging to sql.log when migrations are on.

    `mode` is appended to the statement (backend-specific, e.g.
    'CASCADE').  NOTE(review): the default is a single space, not ''
    — preserved as-is.
    """
    # use a do-nothing logfile stand-in when there is no migration file
    if table._dbt:
        logfile = self.file_open(table._loggername, 'a')
    else:
        class Logfile(object):
            def write(self, value):
                pass
            def close(self):
                pass
        logfile = Logfile()

    try:
        queries = table._db._adapter._truncate(table, mode)
        for query in queries:
            logfile.write(query + '\n')
            self.execute(query)
        table._db.commit()
        logfile.write('success!\n')
    finally:
        logfile.close()
1414 - def _update(self, tablename, query, fields):
1415 if query: 1416 if use_common_filters(query): 1417 query = self.common_filter(query, [tablename]) 1418 sql_w = ' WHERE ' + self.expand(query) 1419 else: 1420 sql_w = '' 1421 sql_v = ','.join(['%s=%s' % (field.name, 1422 self.expand(value, field.type)) \ 1423 for (field, value) in fields]) 1424 return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)
1425
def update(self, tablename, query, fields):
    """Execute an UPDATE built by _update().

    Returns the number of affected rows, or None when the driver does
    not expose cursor.rowcount.
    """
    sql = self._update(tablename, query, fields)
    self.execute(sql)
    try:
        return self.cursor.rowcount
    except Exception:
        # narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt; some drivers lack rowcount
        return None
1433
1434 - def _delete(self, tablename, query):
1435 if query: 1436 if use_common_filters(query): 1437 query = self.common_filter(query, [tablename]) 1438 sql_w = ' WHERE ' + self.expand(query) 1439 else: 1440 sql_w = '' 1441 return 'DELETE FROM %s%s;' % (tablename, sql_w)
1442
def delete(self, tablename, query):
    """Execute DELETE for `query`, emulating ON DELETE CASCADE on
    SQLite/SpatiaLite (which do not enforce it natively).

    Returns the number of deleted rows, or None when the driver does
    not expose cursor.rowcount.
    """
    sql = self._delete(tablename, query)
    ### special code to handle CASCADE in SQLite & SpatiaLite
    db = self.db
    table = db[tablename]
    if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
        # remember the ids about to vanish so referencing rows can be purged
        deleted = [x[table._id.name] for x in db(query).select(table._id)]
    ### end special code to handle CASCADE in SQLite & SpatiaLite
    self.execute(sql)
    try:
        counter = self.cursor.rowcount
    except Exception:
        # narrowed from a bare `except:`; some drivers lack rowcount
        counter = None
    ### special code to handle CASCADE in SQLite & SpatiaLite
    if self.dbengine in ('sqlite', 'spatialite') and counter:
        for field in table._referenced_by:
            if field.type == 'reference ' + table._tablename \
                    and field.ondelete == 'CASCADE':
                db(field.belongs(deleted)).delete()
    ### end special code to handle CASCADE in SQLite & SpatiaLite
    return counter
1464
def get_table(self, query):
    """Return the single table name `query` refers to; raise otherwise."""
    names = self.tables(query)
    if len(names) == 1:
        return names[0]
    if not names:
        raise RuntimeError("No table selected")
    raise RuntimeError("Too many tables selected")
1473
def expand_all(self, fields, tablenames):
    """Normalize a select field list into concrete Field/Expression objects.

    SQLALL entries expand to every field of their table; 'table.field'
    strings are resolved; other strings become raw Expressions.  An
    empty list means "all fields of all requested tables".
    """
    db = self.db
    new_fields = []
    append = new_fields.append
    for item in fields:
        if isinstance(item,SQLALL):
            new_fields += item._table
        elif isinstance(item,str):
            if REGEX_TABLE_DOT_FIELD.match(item):
                tablename,fieldname = item.split('.')
                append(db[tablename][fieldname])
            else:
                # arbitrary SQL snippet; late-bound default keeps `item`
                append(Expression(db,lambda item=item:item))
        else:
            append(item)
    # ## if no fields specified take them all from the requested tables
    if not new_fields:
        for table in tablenames:
            for field in db[table]:
                append(field)
    return new_fields
1495
def _select(self, query, fields, attributes):
    """Build the full SELECT statement for `query` and `fields`.

    Validates the select attributes, collects the involved tables,
    assembles DISTINCT / JOIN / LEFT JOIN / WHERE / GROUP BY / HAVING /
    ORDER BY / LIMIT clauses and returns the final SQL string.  Also
    caches the expanded column names in self._colnames for row parsing.
    """
    tables = self.tables
    for key in set(attributes.keys())-SELECT_ARGS:
        raise SyntaxError('invalid select attribute: %s' % key)
    args_get = attributes.get
    tablenames = tables(query)
    for field in fields:
        # 'table.field' strings are resolved to Field objects
        if isinstance(field, basestring) \
                and REGEX_TABLE_DOT_FIELD.match(field):
            tn,fn = field.split('.')
            field = self.db[tn][fn]
        for tablename in tables(field):
            if not tablename in tablenames:
                tablenames.append(tablename)

    if len(tablenames) < 1:
        raise SyntaxError('Set: no tables selected')
    self._colnames = map(self.expand, fields)
    def geoexpand(field):
        # geometry columns are selected as WKT text
        if isinstance(field.type,str) and field.type.startswith('geometry'):
            field = field.st_astext()
        return self.expand(field)
    sql_f = ', '.join(map(geoexpand, fields))
    sql_o = ''
    sql_s = ''
    left = args_get('left', False)
    inner_join = args_get('join', False)
    distinct = args_get('distinct', False)
    groupby = args_get('groupby', False)
    orderby = args_get('orderby', False)
    having = args_get('having', False)
    limitby = args_get('limitby', False)
    for_update = args_get('for_update', False)
    if self.can_select_for_update is False and for_update is True:
        raise SyntaxError('invalid select attribute: for_update')
    if distinct is True:
        sql_s += 'DISTINCT'
    elif distinct:
        sql_s += 'DISTINCT ON (%s)' % distinct
    if inner_join:
        icommand = self.JOIN()
        if not isinstance(inner_join, (tuple, list)):
            inner_join = [inner_join]
        # split plain tables from ON-expressions
        ijoint = [t._tablename for t in inner_join
                  if not isinstance(t,Expression)]
        ijoinon = [t for t in inner_join if isinstance(t, Expression)]
        itables_to_merge={} #issue 490
        [itables_to_merge.update(
                dict.fromkeys(tables(t))) for t in ijoinon]
        ijoinont = [t.first._tablename for t in ijoinon]
        [itables_to_merge.pop(t) for t in ijoinont
         if t in itables_to_merge] #issue 490
        iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
        iexcluded = [t for t in tablenames
                     if not t in iimportant_tablenames]
    if left:
        join = attributes['left']
        command = self.LEFT_JOIN()
        if not isinstance(join, (tuple, list)):
            join = [join]
        joint = [t._tablename for t in join
                 if not isinstance(t, Expression)]
        joinon = [t for t in join if isinstance(t, Expression)]
        #patch join+left patch (solves problem with ordering in left joins)
        tables_to_merge={}
        [tables_to_merge.update(
                dict.fromkeys(tables(t))) for t in joinon]
        joinont = [t.first._tablename for t in joinon]
        [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
        important_tablenames = joint + joinont + tables_to_merge.keys()
        excluded = [t for t in tablenames
                    if not t in important_tablenames ]
    else:
        excluded = tablenames

    if use_common_filters(query):
        query = self.common_filter(query,excluded)
    sql_w = ' WHERE ' + self.expand(query) if query else ''

    def alias(t):
        return str(self.db[t])
    # build the FROM clause depending on which join kinds are present
    if inner_join and not left:
        sql_t = ', '.join([alias(t) for t in iexcluded + \
                               itables_to_merge.keys()])
        for t in ijoinon:
            sql_t += ' %s %s' % (icommand, str(t))
    elif not inner_join and left:
        sql_t = ', '.join([alias(t) for t in excluded + \
                               tables_to_merge.keys()])
        if joint:
            sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
        for t in joinon:
            sql_t += ' %s %s' % (command, str(t))
    elif inner_join and left:
        all_tables_in_query = set(important_tablenames + \
                                  iimportant_tablenames + \
                                  tablenames)
        tables_in_joinon = set(joinont + ijoinont)
        tables_not_in_joinon = \
            all_tables_in_query.difference(tables_in_joinon)
        sql_t = ','.join([alias(t) for t in tables_not_in_joinon])
        for t in ijoinon:
            sql_t += ' %s %s' % (icommand, str(t))
        if joint:
            sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
        for t in joinon:
            sql_t += ' %s %s' % (command, str(t))
    else:
        sql_t = ', '.join(alias(t) for t in tablenames)
    if groupby:
        if isinstance(groupby, (list, tuple)):
            groupby = xorify(groupby)
        sql_o += ' GROUP BY %s' % self.expand(groupby)
        if having:
            sql_o += ' HAVING %s' % attributes['having']
    if orderby:
        if isinstance(orderby, (list, tuple)):
            orderby = xorify(orderby)
        if str(orderby) == '<random>':
            sql_o += ' ORDER BY %s' % self.RANDOM()
        else:
            sql_o += ' ORDER BY %s' % self.expand(orderby)
    if limitby:
        if not orderby and tablenames:
            # a deterministic order is required for stable paging
            sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in (hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey or [self.db[t]._id.name])])
        # oracle does not support limitby
    sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
    if for_update and self.can_select_for_update is True:
        sql = sql.rstrip(';') + ' FOR UPDATE;'
    return sql
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Assemble the final SELECT, appending LIMIT/OFFSET for (min, max)."""
    if limitby:
        lmin, lmax = limitby
        sql_o = sql_o + ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
1633
1634 - def _fetchall(self):
1635 return self.cursor.fetchall()
1636
def _select_aux(self,sql,fields,attributes):
    """Execute (or fetch cached) rows for `sql` and post-process them.

    When a `cache` attribute is present the raw rows are memoized under
    a key derived from the URI + SQL; long keys are md5-hashed.  Rows
    are sliced by limitby and handed to the row processor (self.parse
    by default).
    """
    args_get = attributes.get
    cache = args_get('cache',None)
    if not cache:
        self.execute(sql)
        rows = self._fetchall()
    else:
        (cache_model, time_expire) = cache
        key = self.uri + '/' + sql + '/rows'
        if len(key)>200: key = hashlib_md5(key).hexdigest()
        def _select_aux2():
            self.execute(sql)
            return self._fetchall()
        rows = cache_model(key,_select_aux2,time_expire)
    if isinstance(rows,tuple):
        rows = list(rows)
    # drop rows before the offset when the backend ignored it
    limitby = args_get('limitby', None) or (0,)
    rows = self.rowslice(rows,limitby[0],None)
    processor = args_get('processor',self.parse)
    cacheable = args_get('cacheable',False)
    return processor(rows,fields,self._colnames,cacheable=cacheable)
def select(self, query, fields, attributes):
    """
    Always returns a Rows object, possibly empty.

    When both `cache` and `cacheable` attributes are set, the whole
    processed Rows object is cached (keyed on URI + SQL, md5-hashed if
    long); otherwise only _select_aux's behavior applies.
    """
    sql = self._select(query, fields, attributes)
    cache = attributes.get('cache', None)
    if cache and attributes.get('cacheable',False):
        del attributes['cache']
        (cache_model, time_expire) = cache
        key = self.uri + '/' + sql
        if len(key)>200: key = hashlib_md5(key).hexdigest()
        args = (sql,fields,attributes)
        return cache_model(
            key,
            lambda self=self,args=args:self._select_aux(*args),
            time_expire)
    else:
        return self._select_aux(sql,fields,attributes)
1677
1678 - def _count(self, query, distinct=None):
1679 tablenames = self.tables(query) 1680 if query: 1681 if use_common_filters(query): 1682 query = self.common_filter(query, tablenames) 1683 sql_w = ' WHERE ' + self.expand(query) 1684 else: 1685 sql_w = '' 1686 sql_t = ','.join(tablenames) 1687 if distinct: 1688 if isinstance(distinct,(list, tuple)): 1689 distinct = xorify(distinct) 1690 sql_d = self.expand(distinct) 1691 return 'SELECT count(DISTINCT %s) FROM %s%s;' % \ 1692 (sql_d, sql_t, sql_w) 1693 return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)
1694
def count(self, query, distinct=None):
    """Run the COUNT query and return the scalar result."""
    sql = self._count(query, distinct)
    self.execute(sql)
    row = self.cursor.fetchone()
    return row[0]
1698
def tables(self, *queries):
    """Collect distinct table names referenced by the given queries."""
    found = set()
    for node in queries:
        if isinstance(node, Field):
            found.add(node.tablename)
        elif isinstance(node, (Expression, Query)):
            # recurse into both operands of the expression tree
            if node.first is not None:
                found = found.union(self.tables(node.first))
            if node.second is not None:
                found = found.union(self.tables(node.second))
    return list(found)
1710
def commit(self):
    """Commit the current transaction if a connection is open."""
    if not self.connection:
        return None
    return self.connection.commit()
1713
def rollback(self):
    """Roll back the current transaction if a connection is open."""
    if not self.connection:
        return None
    return self.connection.rollback()
1716
def close_connection(self):
    """Close the underlying DB connection if one is open."""
    if not self.connection:
        return None
    return self.connection.close()
1719
def distributed_transaction_begin(self, key):
    """No-op in the base adapter; 2-phase-capable backends override."""
    return
1722
def prepare(self, key):
    """First phase of a two-phase commit (when the driver supports it)."""
    if self.connection:
        self.connection.prepare()
1725
def commit_prepared(self, key):
    """Second phase of a two-phase commit: commit the prepared txn."""
    if self.connection:
        self.connection.commit()
1728
def rollback_prepared(self, key):
    """Abort a prepared two-phase transaction."""
    if self.connection:
        self.connection.rollback()
1731
def concat_add(self, tablename):
    """Separator used to chain multiple ADD clauses in one ALTER TABLE."""
    separator = ', ADD '
    return separator
1734
def constraint_name(self, table, fieldname):
    """Deterministic name for a field's FK constraint."""
    return '%s_%s__constraint' % (table, fieldname)
1737
def create_sequence_and_triggers(self, query, table, **args):
    """Run the CREATE TABLE; backends needing sequences/triggers override."""
    self.execute(query)
1740
def log_execute(self, *a, **b):
    """Execute SQL on the cursor while recording it for debugging.

    Stores the statement in db._lastsql, appends (sql, elapsed) to
    db._timings (bounded to the last TIMINGSSIZE entries) and returns
    the cursor result.  Returns None when there is no connection.
    """
    if not self.connection: return None
    command = a[0]
    if self.db._debug:
        LOGGER.debug('SQL: %s' % command)
    self.db._lastsql = command
    t0 = time.time()
    ret = self.cursor.execute(*a, **b)
    self.db._timings.append((command,time.time()-t0))
    # keep only the most recent TIMINGSSIZE timing entries
    del self.db._timings[:-TIMINGSSIZE]
    return ret
1752
def execute(self, *a, **b):
    """Default execute path: delegate to the logging executor."""
    return self.log_execute(*a, **b)
1755
def represent(self, obj, fieldtype):
    """Convert a Python value into its SQL literal form for `fieldtype`.

    Handles callables, SQLCustomType, list: types (bar-encoded), NULLs,
    booleans, numbers, references, dates/times, blobs (base64) and
    json, finally quoting text through self.adapt().
    """
    field_is_type = fieldtype.startswith
    # callable defaults are evaluated at insert/update time
    if isinstance(obj, CALLABLETYPES):
        obj = obj()
    if isinstance(fieldtype, SQLCustomType):
        value = fieldtype.encoder(obj)
        if fieldtype.type in ('string','text', 'json'):
            return self.adapt(value)
        return value
    if isinstance(obj, (Expression, Field)):
        # embed sub-expressions verbatim
        return str(obj)
    if field_is_type('list:'):
        if not obj:
            obj = []
        elif not isinstance(obj, (list, tuple)):
            obj = [obj]
        if field_is_type('list:string'):
            obj = map(str,obj)
        else:
            obj = map(int,obj)
    # we don't want to bar_encode json objects
    if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
        obj = bar_encode(obj)
    if obj is None:
        return 'NULL'
    # '' is NULL except for string/text/json/password/upload types
    if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
        return 'NULL'
    r = self.represent_exceptions(obj, fieldtype)
    if not r is None:
        return r
    if fieldtype == 'boolean':
        if obj and not str(obj)[:1].upper() in '0F':
            return self.smart_adapt(self.TRUE)
        else:
            return self.smart_adapt(self.FALSE)
    if fieldtype == 'id' or fieldtype == 'integer':
        return str(int(obj))
    if field_is_type('decimal'):
        return str(obj)
    elif field_is_type('reference'): # reference
        if fieldtype.find('.')>0:
            # keyed-table reference: keep the raw key representation
            return repr(obj)
        elif isinstance(obj, (Row, Reference)):
            return str(obj['id'])
        return str(int(obj))
    elif fieldtype == 'double':
        return repr(float(obj))
    if isinstance(obj, unicode):
        obj = obj.encode(self.db_codec)
    if fieldtype == 'blob':
        obj = base64.b64encode(str(obj))
    elif fieldtype == 'date':
        if isinstance(obj, (datetime.date, datetime.datetime)):
            obj = obj.isoformat()[:10]
        else:
            obj = str(obj)
    elif fieldtype == 'datetime':
        if isinstance(obj, datetime.datetime):
            obj = obj.isoformat(self.T_SEP)[:19]
        elif isinstance(obj, datetime.date):
            obj = obj.isoformat()[:10]+' 00:00:00'
        else:
            obj = str(obj)
    elif fieldtype == 'time':
        if isinstance(obj, datetime.time):
            # NOTE(review): [:10] keeps one digit of microseconds when
            # present — preserved as-is; confirm before changing to [:8]
            obj = obj.isoformat()[:10]
        else:
            obj = str(obj)
    elif fieldtype == 'json':
        if not self.native_json:
            if have_serializers:
                obj = serializers.json(obj)
            elif simplejson:
                # BUGFIX: was simplejson.dumps(items) — `items` is not
                # defined in this scope and raised NameError
                obj = simplejson.dumps(obj)
            else:
                raise RuntimeError("missing simplejson")
    if not isinstance(obj,bytes):
        obj = bytes(obj)
    try:
        obj.decode(self.db_codec)
    except Exception:
        # narrowed from a bare `except:`; fall back through latin1
        obj = obj.decode('latin1').encode(self.db_codec)
    return self.adapt(obj)
1839
def represent_exceptions(self, obj, fieldtype):
    """Hook for backend-specific literal handling; None means no override."""
    return None
1842
def lastrowid(self, table):
    """Last auto-id of an insert; None in the base adapter."""
    return None
1845
def integrity_error_class(self):
    """Driver's integrity-error class; NoneType here so nothing matches."""
    return type(None)
1848
def rowslice(self, rows, minimum=0, maximum=None):
    """
    By default this function does nothing;
    overload when db does not do slicing.
    """
    sliced = rows
    return sliced
1855
def parse_value(self, value, field_type, blob_decode=True):
    """Convert a raw DB value to Python according to `field_type`.

    Text/geo/dict types pass through unchanged; everything else is
    dispatched via self.parsemap keyed on the leading type word (see
    build_parsemap).  blob_decode=False skips base64 decoding.
    """
    if field_type != 'blob' and isinstance(value, str):
        try:
            value = value.decode(self.db._db_codec)
        except Exception:
            pass
    if isinstance(value, unicode):
        value = value.encode('utf-8')
    if isinstance(field_type, SQLCustomType):
        value = field_type.decoder(value)
    if not isinstance(field_type, str) or value is None:
        return value
    elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
        return value
    elif field_type.startswith('geo'):
        return value
    elif field_type == 'blob' and not blob_decode:
        return value
    else:
        key = REGEX_TYPE.match(field_type).group(0)
        return self.parsemap[key](value,field_type)
1877
def parse_reference(self, value, field_type):
    """Wrap a plain id in a lazy Reference; keyed references ('.') pass through."""
    referee = field_type[10:].strip()
    if '.' in referee:
        return value
    ref = Reference(value)
    ref._table, ref._record = self.db[referee], None
    return ref
1884
def parse_boolean(self, value, field_type):
    """True for boolean True (or 1) and any value whose text starts with 't'/'T'."""
    if value == True:
        return True
    return str(value)[:1].lower() == 't'
1887
def parse_date(self, value, field_type):
    """Coerce to datetime.date; accepts date/datetime or 'YYYY-MM-DD...' text."""
    if isinstance(value, datetime.datetime):
        return value.date()
    if isinstance(value, datetime.date):
        return value
    y, m, d = [int(p) for p in str(value)[:10].strip().split('-')]
    return datetime.date(y, m, d)
1895
def parse_time(self, value, field_type):
    """Coerce to datetime.time; accepts 'HH:MM[:SS]' text."""
    if isinstance(value, datetime.time):
        return value
    pieces = [int(p) for p in str(value)[:8].strip().split(':')[:3]]
    if len(pieces) < 3:
        pieces = pieces + [0]
    h, mi, s = pieces
    return datetime.time(h, mi, s)
1905
def parse_datetime(self, value, field_type):
    """Coerce to datetime.datetime, applying a trailing +HH:MM/-HH:MM offset."""
    if isinstance(value, datetime.datetime):
        return value
    raw = str(value)
    date_part, time_part, tz_part = raw[:10], raw[11:19], raw[19:]
    offset = None
    if '+' in tz_part:
        hours, minutes = tz_part.split('+')[1].split(':')
        offset = datetime.timedelta(seconds=3600 * int(hours) + 60 * int(minutes))
    elif '-' in tz_part:
        hours, minutes = tz_part.split('-')[1].split(':')
        offset = -datetime.timedelta(seconds=3600 * int(hours) + 60 * int(minutes))
    y, m, d = [int(p) for p in date_part.split('-')]
    pieces = time_part.split(':')[:3] if time_part else [0, 0, 0]
    pieces = list(pieces) + [0] * (3 - len(pieces))
    h, mi, s = [int(p) for p in pieces]
    result = datetime.datetime(y, m, d, h, mi, s)
    if offset:
        result = result + offset
    return result
1929
def parse_blob(self, value, field_type):
    """Decode a base64-encoded blob column back to its raw byte content."""
    encoded = str(value)
    return base64.b64decode(encoded)
1932
def parse_decimal(self, value, field_type):
    """Convert a raw column value to decimal.Decimal.

    The scale is taken from the declared 'decimal(precision,scale)' type.
    sqlite/spatialite return floats, so the value is first rounded via
    string formatting to the declared scale.
    """
    scale = int(field_type[8:-1].split(',')[-1])
    if self.dbengine in ('sqlite', 'spatialite'):
        value = ('%.' + str(scale) + 'f') % value
    if isinstance(value, decimal.Decimal):
        return value
    return decimal.Decimal(str(value))
1940
def parse_list_integers(self, value, field_type):
    """Decode a 'list:integer' column.

    Google Datastore stores genuine lists; other engines use the
    bar-separated text encoding.
    """
    if self.dbengine == 'google:datastore':
        return value
    return bar_decode_integer(value)
1945
def parse_list_references(self, value, field_type):
    """Decode a 'list:reference <table>' column into Reference objects.

    Non-datastore engines first bar-decode the stored text into integers.
    """
    if self.dbengine != 'google:datastore':
        value = bar_decode_integer(value)
    referee_type = field_type[5:]
    refs = []
    for item in value:
        refs.append(self.parse_reference(item, referee_type))
    return refs
1950
def parse_list_strings(self, value, field_type):
    """Decode a 'list:string' column.

    Google Datastore stores genuine lists; other engines use the
    bar-separated text encoding.
    """
    if self.dbengine == 'google:datastore':
        return value
    return bar_decode_string(value)
1955
def parse_id(self, value, field_type):
    """Coerce an 'id' column value to int."""
    return int(value)
1958
def parse_integer(self, value, field_type):
    """Coerce an 'integer'/'bigint' column value to int."""
    return int(value)
1961
def parse_double(self, value, field_type):
    """Coerce a 'float'/'double' column value to float."""
    return float(value)
1964
def parse_json(self, value, field_type):
    """Decode a 'json' column.

    With native JSON support the driver already returns a parsed value;
    otherwise the stored string is deserialized here via the web2py
    serializers or simplejson.
    """
    if self.native_json:
        return value
    if not isinstance(value, basestring):
        raise RuntimeError('json data not a string')
    if isinstance(value, unicode):
        value = value.encode('utf-8')
    if have_serializers:
        return serializers.loads_json(value)
    if simplejson:
        return simplejson.loads(value)
    raise RuntimeError("missing simplejson")
1978
def build_parsemap(self):
    """Populate self.parsemap: field-type base name -> parser method.

    Used by parse_value() after stripping type parameters with REGEX_TYPE.
    """
    integer, double = self.parse_integer, self.parse_double
    parsemap = {
        'id': self.parse_id,
        'integer': integer,
        'bigint': integer,
        'float': double,
        'double': double,
        'reference': self.parse_reference,
        'boolean': self.parse_boolean,
        'date': self.parse_date,
        'time': self.parse_time,
        'datetime': self.parse_datetime,
        'blob': self.parse_blob,
        'decimal': self.parse_decimal,
        'json': self.parse_json,
    }
    parsemap['list:integer'] = self.parse_list_integers
    parsemap['list:reference'] = self.parse_list_references
    parsemap['list:string'] = self.parse_list_strings
    self.parsemap = parsemap
1998
def parse(self, rows, fields, colnames, blob_decode=True,
          cacheable = False):
    """Convert raw driver rows into a Rows object of nested Row records.

    rows      -- raw result rows from the driver cursor
    fields    -- Field objects matching the select list (used for '_extra')
    colnames  -- column labels; 'table.field' names are parsed per-field,
                 anything else lands in row['_extra']
    blob_decode -- passed through to parse_value (base64 blob decoding)
    cacheable -- when True, skip attaching update_record/delete_record
                 closures so the result can be safely cached/pickled
    """
    self.build_parsemap()
    db = self.db
    virtualtables = []
    new_rows = []
    # Precompute per-column (table, field, type) tuples once, outside the
    # per-row loop; None marks non-'table.field' (expression) columns.
    tmps = []
    for colname in colnames:
        if not REGEX_TABLE_DOT_FIELD.match(colname):
            tmps.append(None)
        else:
            (tablename, fieldname) = colname.split('.')
            table = db[tablename]
            field = table[fieldname]
            ft = field.type
            tmps.append((tablename,fieldname,table,field,ft))
    for (i,row) in enumerate(rows):
        new_row = Row()
        for (j,colname) in enumerate(colnames):
            value = row[j]
            tmp = tmps[j]
            if tmp:
                (tablename,fieldname,table,field,ft) = tmp
                # one sub-Row per table within each result row
                if tablename in new_row:
                    colset = new_row[tablename]
                else:
                    colset = new_row[tablename] = Row()
                    if tablename not in virtualtables:
                        virtualtables.append(tablename)
                value = self.parse_value(value,ft,blob_decode)
                if field.filter_out:
                    value = field.filter_out(value)
                colset[fieldname] = value

                # for backward compatibility
                if ft=='id' and fieldname!='id' and \
                        not 'id' in table.fields:
                    colset['id'] = value

                if ft == 'id' and not cacheable:
                    # temporary hack to deal with
                    # GoogleDatastoreAdapter
                    # references
                    if isinstance(self, GoogleDatastoreAdapter):
                        id = value.key().id_or_name()
                        colset[fieldname] = id
                        colset.gae_item = value
                    else:
                        id = value
                    # attach record-level update/delete helpers and lazy
                    # back-reference sets (these make the row non-cacheable)
                    colset.update_record = RecordUpdater(colset,table,id)
                    colset.delete_record = RecordDeleter(table,id)
                    for rfield in table._referenced_by:
                        referee_link = db._referee_name and \
                            db._referee_name % dict(
                            table=rfield.tablename,field=rfield.name)
                        if referee_link and not referee_link in colset:
                            colset[referee_link] = LazySet(rfield,id)
            else:
                # expression/aliased column: store under row['_extra'] and,
                # when 'AS name' can be recognized, also as a row attribute
                if not '_extra' in new_row:
                    new_row['_extra'] = Row()
                new_row['_extra'][colname] = \
                    self.parse_value(value,
                                     fields[j].type,blob_decode)
                new_column_name = \
                    REGEX_SELECT_AS_PARSER.search(colname)
                if not new_column_name is None:
                    column_name = new_column_name.groups(0)
                    setattr(new_row,column_name[0],value)
        new_rows.append(new_row)
    rowsobj = Rows(db, new_rows, colnames, rawrows=rows)

    for tablename in virtualtables:
        ### new style virtual fields
        table = db[tablename]
        fields_virtual = [(f,v) for (f,v) in table.iteritems()
                          if isinstance(v,FieldVirtual)]
        fields_lazy = [(f,v) for (f,v) in table.iteritems()
                       if isinstance(v,FieldMethod)]
        if fields_virtual or fields_lazy:
            for row in rowsobj.records:
                box = row[tablename]
                for f,v in fields_virtual:
                    box[f] = v.f(row)
                for f,v in fields_lazy:
                    box[f] = (v.handler or VirtualCommand)(v.f,row)

        ### old style virtual fields
        for item in table.virtualfields:
            try:
                rowsobj = rowsobj.setvirtualfields(**{tablename:item})
            except (KeyError, AttributeError):
                # to avoid breaking virtualfields when partial select
                pass
    return rowsobj
2093
def common_filter(self, query, tablenames):
    """AND per-table common filters and multi-tenant constraints onto query.

    For each table: apply its user-provided _common_filter, then, if the
    table has the request-tenant field with a non-None default, restrict
    rows to that tenant value.
    """
    tenant_field = self.db._request_tenant

    for name in tablenames:
        table = self.db[name]

        # user-provided common filter
        custom = table._common_filter
        if custom != None:
            query = query & custom(query)

        # multi-tenant restriction
        if tenant_field in table:
            default = table[tenant_field].default
            if default is not None:
                condition = table[tenant_field] == default
                query = condition if query is None else query & condition
    return query
2114
def CASE(self,query,t,f):
    """Build a SQL 'CASE WHEN <query> THEN <t> ELSE <f> END' Expression.

    t and f may be None (NULL), Expressions (expanded as-is), or plain
    values represented according to their Python type.
    """
    type_names = {type(True): 'boolean', type(0): 'integer', type(1.0): 'double'}

    def sql_literal(x):
        if x is None:
            return 'NULL'
        if isinstance(x, Expression):
            return str(x)
        return self.represent(x, type_names.get(type(x), 'string'))

    sql = 'CASE WHEN %s THEN %s ELSE %s END' % (
        self.expand(query), sql_literal(t), sql_literal(f))
    return Expression(self.db, sql)
###################################################################################
# List of all the available adapters; they all extend BaseAdapter.
###################################################################################

class SQLiteAdapter(BaseAdapter):
    """Adapter for SQLite databases (sqlite2/sqlite3 drivers)."""
    drivers = ('sqlite2','sqlite3')

    can_select_for_update = None # support ourselves with BEGIN TRANSACTION

    def EXTRACT(self,field,what):
        # delegate date-part extraction to the web2py_extract UDF
        # registered in after_connection()
        return "web2py_extract('%s',%s)" % (what, self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):
        """UDF: extract a component from an ISO 'YYYY-MM-DD HH:MM:SS' string.

        'epoch' converts the full timestamp to Unix time; any parse failure
        yields None (mapped to SQL NULL).
        """
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            if lookup != 'epoch':
                (i, j) = table[lookup]
                return int(s[i:j])
            else:
                return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
        except:
            return None

    @staticmethod
    def web2py_regexp(expression, item):
        # UDF backing SQLite's REGEXP operator
        return re.compile(expression).search(item) is not None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse a 'sqlite://<path>' or 'sqlite:memory' URI and prepare a
        connector; sqlite connections are never pooled (pool_size forced 0).
        """
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://',1)[1]
            # relative paths are resolved against the working folder
            if dbpath[0] != '/':
                if PYTHON_VERSION == 2:
                    dbpath = pjoin(
                        self.folder.decode(path_encoding).encode('utf8'), dbpath)
                else:
                    dbpath = pjoin(self.folder, dbpath)
        if not 'check_same_thread' in driver_args:
            # web2py manages cross-thread use of connections itself
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args and do_connect:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connector(dbpath=dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # register the UDFs used by EXTRACT() and REGEXP()
        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)
        self.connection.create_function("REGEXP", 2,
                                        SQLiteAdapter.web2py_regexp)

    def _truncate(self, table, mode=''):
        # sqlite has no TRUNCATE; DELETE plus a sqlite_sequence reset
        # restarts the autoincrement counter
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]

    def lastrowid(self, table):
        # id of the most recently inserted row on this cursor
        return self.cursor.lastrowid

    def REGEXP(self,first,second):
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second,'string'))

    def select(self, query, fields, attributes):
        """
        Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
        Note that the entire database, rather than one record, is locked
        (it will be locked eventually anyway by the following UPDATE).
        """
        if attributes.get('for_update', False) and not 'cache' in attributes:
            self.execute('BEGIN IMMEDIATE TRANSACTION;')
        return super(SQLiteAdapter, self).select(query, fields, attributes)
class SpatiaLiteAdapter(SQLiteAdapter):
    """SQLite adapter with the SpatiaLite GIS extension loaded.

    Adds a 'geometry' column type and spatial functions (Contains,
    Distance, Within, ...) on top of the plain SQLite adapter.
    """
    drivers = ('sqlite3','sqlite2')

    types = copy.copy(BaseAdapter.types)
    types.update(geometry='GEOMETRY')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326, after_connection=None):
        """Same as SQLiteAdapter, plus a default SRID for geometry columns."""
        self.db = db
        self.dbengine = "spatialite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        self.srid = srid
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('spatialite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://',1)[1]
            # relative paths are resolved against the working folder
            if dbpath[0] != '/':
                dbpath = pjoin(
                    self.folder.decode(path_encoding).encode('utf8'), dbpath)
        if not 'check_same_thread' in driver_args:
            # web2py manages cross-thread use of connections itself
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args and do_connect:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connector(dbpath=dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        """Load the SpatiaLite extension and register the sqlite UDFs."""
        self.connection.enable_load_extension(True)
        # for Windows, rename libspatialite-2.dll to libspatialite.dll
        # Linux uses libspatialite.so
        # Mac OS X uses libspatialite.dylib
        libspatialite = SPATIALLIBS[platform.system()]
        # BUGFIX: '%' used to be applied to the *result* of execute(), so the
        # literal '%s' was sent to sqlite and the library was never loaded;
        # interpolate the library name into the SQL before executing.
        self.execute(r'SELECT load_extension("%s");' % libspatialite)

        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)
        self.connection.create_function("REGEXP", 2,
                                        SQLiteAdapter.web2py_regexp)

    # GIS functions (SpatiaLite names lack the ST_ prefix)

    def ST_ASGEOJSON(self, first, second):
        return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
                                       second['precision'], second['options'])

    def ST_ASTEXT(self, first):
        return 'AsText(%s)' %(self.expand(first))

    def ST_CONTAINS(self, first, second):
        return 'Contains(%s,%s)' %(self.expand(first),
                                   self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        return 'Distance(%s,%s)' %(self.expand(first),
                                   self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        return 'Equals(%s,%s)' %(self.expand(first),
                                 self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        return 'Intersects(%s,%s)' %(self.expand(first),
                                     self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        return 'Overlaps(%s,%s)' %(self.expand(first),
                                   self.expand(second, first.type))

    def ST_SIMPLIFY(self, first, second):
        return 'Simplify(%s,%s)' %(self.expand(first),
                                   self.expand(second, 'double'))

    def ST_TOUCHES(self, first, second):
        return 'Touches(%s,%s)' %(self.expand(first),
                                  self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        return 'Within(%s,%s)' %(self.expand(first),
                                 self.expand(second, first.type))

    def represent(self, obj, fieldtype):
        """Render geometry values as ST_GeomFromText literals using the SRID
        declared in the field type (default 4326); defer everything else to
        BaseAdapter."""
        field_is_type = fieldtype.startswith
        if field_is_type('geo'):
            srid = 4326 # Spatialite default srid for geometry
            geotype, parms = fieldtype[:-1].split('(')
            parms = parms.split(',')
            if len(parms) >= 2:
                schema, srid = parms[:2]
            value = "ST_GeomFromText('%s',%s)" %(obj, srid)
            return value
        return BaseAdapter.represent(self, obj, fieldtype)
class JDBCSQLiteAdapter(SQLiteAdapter):
    """SQLite adapter over zxJDBC (for Jython deployments)."""
    drivers = ('zxJDBC_sqlite',)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Same URI handling as SQLiteAdapter, but connects through JDBC."""
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://',1)[1]
            # relative paths are resolved against the working folder
            if dbpath[0] != '/':
                dbpath = pjoin(
                    self.folder.decode(path_encoding).encode('utf8'), dbpath)
        def connector(dbpath=dbpath,driver_args=driver_args):
            return self.driver.connect(
                self.driver.getConnection('jdbc:sqlite:'+dbpath),
                **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # FIXME http://www.zentus.com/sqlitejdbc/custom_functions.html for UDFs
        # NOTE(review): unlike SQLiteAdapter, REGEXP is not registered here
        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)

    def execute(self, a):
        # no command post-processing needed under JDBC; just log and run
        return self.log_execute(a)
class MySQLAdapter(BaseAdapter):
    """Adapter for MySQL (MySQLdb or pymysql drivers)."""
    drivers = ('MySQLdb','pymysql')

    maxcharlength = 255
    commit_on_alter_table = True
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'json': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
        'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def varquote(self,name):
        # MySQL quotes identifiers with backticks
        return varquote_aux(name,'`%s`')

    def RANDOM(self):
        return 'RAND()'

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
                                        parameters[0], parameters[1])

    def EPOCH(self, first):
        return "UNIX_TIMESTAMP(%s)" % self.expand(first)

    def REGEXP(self,first,second):
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second,'string'))

    def _drop(self,table,mode):
        # breaks db integrity but without this mysql does not drop table
        return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,
                'SET FOREIGN_KEY_CHECKS=1;']

    def _insert_empty(self, table):
        # insert a row made entirely of column defaults
        return 'INSERT INTO %s VALUES (DEFAULT);' % table

    # two-phase commit via MySQL XA transactions
    def distributed_transaction_begin(self,key):
        self.execute('XA START;')

    def prepare(self,key):
        self.execute("XA END;")
        self.execute("XA PREPARE;")

    def commit_prepared(self,key):
        # fixed: parameter was misspelled 'ley'; renamed to 'key' for
        # consistency with prepare()/rollback_prepared() and the other adapters
        self.execute("XA COMMIT;")

    def rollback_prepared(self,key):
        self.execute("XA ROLLBACK;")

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse 'mysql://user:pass@host:port/db?set_encoding=...' and
        prepare a connector; raises SyntaxError on a malformed URI or a
        missing user/host/database component."""
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        driver_args.update(db=db,
                           user=credential_decoder(user),
                           passwd=credential_decoder(password),
                           host=host,
                           port=port,
                           charset=charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # enforce FK checks; disable backslash escaping so represent()'s
        # quote doubling matches server behavior
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self,table):
        # id generated by the last INSERT on this connection
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])
2491
class PostgreSQLAdapter(BaseAdapter):
    """Adapter for PostgreSQL (psycopg2 or pg8000 drivers), including
    PostGIS geometry/geography support."""
    drivers = ('psycopg2','pg8000')

    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def varquote(self,name):
        # postgres quotes identifiers with double quotes
        return varquote_aux(name,'"%s"')

    def adapt(self,obj):
        """Quote/escape a Python value as a SQL literal, using the driver's
        own adapter when available (psycopg2)."""
        if self.driver_name == 'psycopg2':
            return psycopg2_adapt(obj).getquoted()
        elif self.driver_name == 'pg8000':
            # '%' doubled because pg8000 re-formats the query string
            return "'%s'" % str(obj).replace("%","%%").replace("'","''")
        else:
            return "'%s'" % str(obj).replace("'","''")

    def sequence_name(self,table):
        # NOTE(review): mixed-case 'Seq' is historical web2py behavior;
        # unquoted identifiers are folded to lowercase by postgres
        return '%s_id_Seq' % table

    def RANDOM(self):
        return 'RANDOM()'

    def ADD(self, first, second):
        # '+' means string concatenation (||) for text-like types
        t = first.type
        if t in ('text','string','password', 'json', 'upload','blob'):
            return '(%s || %s)' % (self.expand(first), self.expand(second, t))
        else:
            return '(%s + %s)' % (self.expand(first), self.expand(second, t))

    # two-phase commit via PREPARE TRANSACTION
    def distributed_transaction_begin(self,key):
        return

    def prepare(self,key):
        self.execute("PREPARE TRANSACTION '%s';" % key)

    def commit_prepared(self,key):
        self.execute("COMMIT PREPARED '%s';" % key)

    def rollback_prepared(self,key):
        self.execute("ROLLBACK PREPARED '%s';" % key)

    def create_sequence_and_triggers(self, query, table, **args):
        # following lines should only be executed if table._sequence_name does not exist
        # self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        # self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
        #              % (table._tablename, table._fieldname, table._sequence_name))
        self.execute(query)

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        """Parse 'postgres://user:pass@host:port/db?sslmode=...' into a DSN
        string and prepare a connector; srid is the default SRID used for
        geometry columns. Raises SyntaxError on a malformed URI."""
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                   % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                   % (db, user, host, port, password)
        # choose driver according to uri
        self.__version__ = "%s %s" % (self.driver.__name__, self.driver.__version__)
        def connector(msg=msg,driver_args=driver_args):
            return self.driver.connect(msg,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        self.connection.set_client_encoding('UTF8')
        self.execute("SET standard_conforming_strings=on;")
        self.try_json()

    def lastrowid(self,table):
        # current value of the table's id sequence on this session
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

    def try_json(self):
        # check JSON data type support
        # (to be added to after_connection)
        # NOTE(review): the pg8000/zxJDBC branches compare version *strings*,
        # which misorders two-digit majors (e.g. "10.0" < "9.2") — verify
        if self.driver_name == "pg8000":
            supports_json = self.connection.server_version >= "9.2.0"
        elif (self.driver_name == "psycopg2") and \
             (self.driver.__version__ >= "2.0.12"):
            supports_json = self.connection.server_version >= 90200
        elif self.driver_name == "zxJDBC":
            supports_json = self.connection.dbversion >= "9.2.0"
        else: supports_json = None
        if supports_json: self.types["json"] = "JSON"
        else: LOGGER.debug("Your database version does not support the JSON data type (using TEXT instead)")

    def LIKE(self,first,second):
        # non-text columns are CAST to CHAR before matching
        args = (self.expand(first), self.expand(second,'string'))
        if not first.type in ('string', 'text', 'json'):
            return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
        else:
            return '(%s LIKE %s)' % args

    def ILIKE(self,first,second):
        # NOTE(review): the CAST branch falls back to case-sensitive LIKE
        args = (self.expand(first), self.expand(second,'string'))
        if not first.type in ('string', 'text', 'json'):
            return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
        else:
            return '(%s ILIKE %s)' % args

    def REGEXP(self,first,second):
        return '(%s ~ %s)' % (self.expand(first),
                              self.expand(second,'string'))

    def STARTSWITH(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand(second+'%','string'))

    def ENDSWITH(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),
                                  self.expand('%'+second,'string'))

    def CONTAINS(self,first,second,case_sensitive=False):
        # list: columns store '|item|item|', hence the bar-wrapped pattern
        if first.type in ('string','text', 'json'):
            second = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            second = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
        op = case_sensitive and self.LIKE or self.ILIKE
        return op(first,second)

    # GIS functions

    def ST_ASGEOJSON(self, first, second):
        """
        http://postgis.org/docs/ST_AsGeoJSON.html
        """
        return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
            self.expand(first), second['precision'], second['options'])

    def ST_ASTEXT(self, first):
        """
        http://postgis.org/docs/ST_AsText.html
        """
        return 'ST_AsText(%s)' %(self.expand(first))

    def ST_X(self, first):
        """
        http://postgis.org/docs/ST_X.html
        """
        return 'ST_X(%s)' %(self.expand(first))

    def ST_Y(self, first):
        """
        http://postgis.org/docs/ST_Y.html
        """
        return 'ST_Y(%s)' %(self.expand(first))

    def ST_CONTAINS(self, first, second):
        """
        http://postgis.org/docs/ST_Contains.html
        """
        return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        """
        http://postgis.org/docs/ST_Distance.html
        """
        return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        """
        http://postgis.org/docs/ST_Equals.html
        """
        return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        """
        http://postgis.org/docs/ST_Intersects.html
        """
        return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        """
        http://postgis.org/docs/ST_Overlaps.html
        """
        return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_SIMPLIFY(self, first, second):
        """
        http://postgis.org/docs/ST_Simplify.html
        """
        return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))

    def ST_TOUCHES(self, first, second):
        """
        http://postgis.org/docs/ST_Touches.html
        """
        return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        """
        http://postgis.org/docs/ST_Within.html
        """
        return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def represent(self, obj, fieldtype):
        """Render geometry/geography values as ST_GeomFromText /
        ST_GeogFromText literals with the declared (or default 4326) SRID;
        defer other types to BaseAdapter."""
        field_is_type = fieldtype.startswith
        if field_is_type('geo'):
            srid = 4326 # postGIS default srid for geometry
            geotype, parms = fieldtype[:-1].split('(')
            parms = parms.split(',')
            if len(parms) >= 2:
                schema, srid = parms[:2]
            if field_is_type('geometry'):
                value = "ST_GeomFromText('%s',%s)" %(obj, srid)
            elif field_is_type('geography'):
                value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
            # else:
            #     raise SyntaxError('Invalid field type %s' %fieldtype)
            return value
        return BaseAdapter.represent(self, obj, fieldtype)
2763
class NewPostgreSQLAdapter(PostgreSQLAdapter):
    """PostgreSQL adapter that stores list: types in native arrays
    (BIGINT[]/TEXT[]) instead of bar-encoded TEXT."""
    drivers = ('psycopg2','pg8000')

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BIGINT[]',
        'list:string': 'TEXT[]',
        'list:reference': 'BIGINT[]',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def parse_list_integers(self, value, field_type):
        # the driver already returns a native list; no bar-decoding needed
        return value

    def parse_list_references(self, value, field_type):
        # wrap each native array element in a Reference
        return [self.parse_reference(r, field_type[5:]) for r in value]

    def parse_list_strings(self, value, field_type):
        # the driver already returns a native list; no bar-decoding needed
        return value

    def represent(self, obj, fieldtype):
        """Render list: values as ARRAY[...] literals; delegate everything
        else to the base adapter."""
        field_is_type = fieldtype.startswith
        if field_is_type('list:'):
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [obj]
            if field_is_type('list:string'):
                obj = map(str,obj)
            else:
                obj = map(int,obj)
            return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
        return BaseAdapter.represent(self, obj, fieldtype)
2816
class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
    """PostgreSQL adapter over zxJDBC (for Jython deployments)."""
    drivers = ('zxJDBC',)

    # no sslmode/charset options in the JDBC URI form
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None ):
        """Parse 'postgres://user:pass@host:port/db' and prepare a JDBC
        connector; raises SyntaxError on a malformed URI."""
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connector(msg=msg,driver_args=driver_args):
            return self.driver.connect(*msg,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # UTF-8 client encoding; an explicit BEGIN is needed under JDBC
        self.connection.set_client_encoding('UTF8')
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.try_json()
class OracleAdapter(BaseAdapter):
    """Adapter for Oracle via cx_Oracle.

    Oracle peculiarities handled here:
    - no autoincrement column: each table gets a sequence plus a BEFORE
      INSERT trigger that fills in ``id`` (create_sequence_and_triggers);
    - no LIMIT/OFFSET: pagination is emulated with nested ROWNUM
      subselects (select_limitby);
    - CLOB literals cannot be inlined: execute() rewrites them into bind
      variables.
    """
    drivers = ('cx_Oracle',)

    # Do not issue a commit after ALTER TABLE statements.
    commit_on_alter_table = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'bigint': 'NUMBER',
        'float': 'FLOAT',
        'double': 'BINARY_DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'NUMBER PRIMARY KEY',
        'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self,tablename):
        # Name of the per-table sequence backing the id column.
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        # Name of the BEFORE INSERT trigger that assigns id values.
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        # Oracle's random-ordering expression.
        return 'dbms_random.value'

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def _drop(self,table,mode):
        # Dropping a table must also drop its id-generating sequence.
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode), 'DROP SEQUENCE %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Oracle (pre-12c) lacks LIMIT/OFFSET; emulate with a double
        # subselect: the inner query caps ROWNUM at lmax, the outer one
        # filters w_row > lmin.
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def constraint_name(self, tablename, fieldname):
        # Oracle limits identifiers to 30 characters; shorten when needed.
        constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
        if len(constraint_name)>30:
            constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
        return constraint_name

    def represent_exceptions(self, obj, fieldtype):
        # Oracle-specific literal rendering; returning None means "use the
        # generic representation".
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            # :CLOB(...) is a placeholder rewritten into a bind variable
            # by execute() below.
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # Everything after 'oracle://' is passed verbatim to cx_Oracle.
        ruri = uri.split('://',1)[1]
        # cx_Oracle needs threaded=True when connections are shared
        # across threads (as the pool does).
        if not 'threaded' in driver_args:
            driver_args['threaded']=True
        def connector(uri=ruri,driver_args=driver_args):
            return self.driver.connect(uri,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # Align the session's date formats with what represent_exceptions
        # emits.
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    # Matches the first :CLOB('...') placeholder that is not inside a
    # quoted string (the leading alternation skips balanced quotes).
    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

    def execute(self, command, args=None):
        """Rewrite inline :CLOB('...') placeholders into numbered bind
        variables (:1, :2, ...) and collect their payloads into args,
        un-escaping doubled quotes along the way."""
        args = args or []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        # cx_Oracle rejects a trailing statement terminator.
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command, args)

    def create_sequence_and_triggers(self, query, table, **args):
        # Create the table, its sequence, and a trigger that (a) honors an
        # explicitly supplied id by re-aligning the sequence past it, and
        # (b) otherwise assigns the next sequence value to :NEW.id.
        tablename = table._tablename
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
        self.execute("""
CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
DECLARE
    curr_val NUMBER;
    diff_val NUMBER;
    PRAGMA autonomous_transaction;
BEGIN
    IF :NEW.id IS NOT NULL THEN
        EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
        diff_val := :NEW.id - curr_val - 1;
        IF diff_val != 0 THEN
          EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
          EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
          EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
        END IF;
    END IF;
    SELECT %(sequence_name)s.nextval INTO :NEW.id FROM DUAL;
END;
""" % dict(trigger_name=trigger_name, tablename=tablename, sequence_name=sequence_name))

    def lastrowid(self,table):
        # The trigger consumed the sequence value; read it back.
        sequence_name = table._sequence_name
        self.execute('SELECT %s.currval FROM dual;' % sequence_name)
        return int(self.cursor.fetchone()[0])

    #def parse_value(self, value, field_type, blob_decode=True):
    #    if blob_decode and isinstance(value, cx_Oracle.LOB):
    #        try:
    #            value = value.read()
    #        except self.driver.ProgrammingError:
    #            # After a subsequent fetch the LOB value is not valid anymore
    #            pass
    #    return BaseAdapter.parse_value(self, value, field_type, blob_decode)

    def _fetchall(self):
        # Materialize CLOB columns eagerly: per the note above, LOB handles
        # become invalid after a subsequent fetch.
        # NOTE(review): references the cx_Oracle module directly rather
        # than self.driver — assumes cx_Oracle is importable; confirm.
        if any(x[1]==cx_Oracle.CLOB for x in self.cursor.description):
            return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
                           for c in r]) for r in self.cursor]
        else:
            return self.cursor.fetchall()
class MSSQLAdapter(BaseAdapter):
    """Adapter for Microsoft SQL Server via pyodbc.

    The URI may be either a DSN name ('mssql://mydsn') or a full
    user:password@host:port/db?arg=value form; both are turned into a
    pyodbc connection string in __init__.
    """
    drivers = ('pyodbc',)
    T_SEP = 'T'

    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def concat_add(self,tablename):
        # MSSQL needs a statement break before ALTER TABLE ... ADD.
        return '; ALTER TABLE %s ADD ' % tablename

    def varquote(self,name):
        # MSSQL quotes identifiers with square brackets.
        return varquote_aux(name,'[%s]')

    def EXTRACT(self,field,what):
        return "DATEPART(%s,%s)" % (what, self.expand(field))

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'NEWID()'

    def ALLOW_NULL(self):
        return ' NULL'

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self,key):
        return 'PRIMARY KEY CLUSTERED (%s)' % key

    def AGGREGATE(self, first, what):
        # MSSQL spells LENGTH as LEN.
        if what == 'LENGTH':
            what = 'LEN'
        return "%s(%s)" % (what, self.expand(first))


    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Only the upper bound is honored (TOP); the offset is applied
        # client-side in rowslice().
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
            # NOTE(review): when GROUP BY is present the ORDER BY tail is
            # stripped — presumably because TOP+GROUP BY+ORDER BY is
            # problematic here; confirm intent.
            if 'GROUP BY' in sql_o:
                sql_o = sql_o[:sql_o.find('ORDER BY ')]
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    TRUE = 1
    FALSE = 0

    REGEX_DSN = re.compile('^(?P<dsn>.+)$')
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
    REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        # Default SRID for spatial fields defined through this adapter.
        self.srid = srid
        self.find_or_make_work_folder()
        # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
        ruri = uri.split('://',1)[1]
        if '@' not in ruri:
            # DSN form: everything after the scheme names an ODBC DSN.
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e
            # was cnxn = 'DSN=%s' % dsn
            cnxn = dsn
        else:
            # user:password@host:port/db?arg1=v1&arg2=v2 form.
            m = self.REGEX_URI.match(ruri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            port = m.group('port') or '1433'
            # Parse the optional url name-value arg pairs after the '?'
            # (in the form of arg1=value1&arg2=value2&...)
            # Default values (drivers like FreeTDS insist on uppercase parameter keys)
            argsdict = { 'DRIVER':'{SQL Server}' }
            urlargs = m.group('urlargs') or ''
            for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            # NOTE(review): dict.iteritems is Python 2 only.
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connector(cnxn=cnxn,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def lastrowid(self,table):
        # SCOPE_IDENTITY() is safer than @@IDENTITY (ignores triggers).
        #self.execute('SELECT @@IDENTITY;')
        self.execute('SELECT SCOPE_IDENTITY();')
        return int(self.cursor.fetchone()[0])

    def integrity_error_class(self):
        return pyodbc.IntegrityError

    def rowslice(self,rows,minimum=0,maximum=None):
        # Apply the limitby offset client-side (see select_limitby).
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]

    def EPOCH(self, first):
        return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)

    # GIS Spatial Extensions

    # No STAsGeoJSON in MSSQL

    def ST_ASTEXT(self, first):
        return '%s.STAsText()' %(self.expand(first))

    def ST_CONTAINS(self, first, second):
        return '%s.STContains(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        return '%s.STDistance(%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        return '%s.STEquals(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        return '%s.STIntersects(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        return '%s.STOverlaps(%s)=1' %(self.expand(first), self.expand(second, first.type))

    # no STSimplify in MSSQL

    def ST_TOUCHES(self, first, second):
        return '%s.STTouches(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        return '%s.STWithin(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def represent(self, obj, fieldtype):
        # Spatial values become STGeomFromText literals; everything else
        # falls through to the base adapter.
        field_is_type = fieldtype.startswith
        if field_is_type('geometry'):
            srid = 0 # MS SQL default srid for geometry
            # NOTE(review): assumes fieldtype carries '(...)' parameters,
            # e.g. 'geometry(...,srid)' — a bare 'geometry' would fail to
            # unpack here; confirm against the field definitions.
            geotype, parms = fieldtype[:-1].split('(')
            if parms:
                srid = parms
            return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
        elif fieldtype == 'geography':
            srid = 4326 # MS SQL default srid for geography
            geotype, parms = fieldtype[:-1].split('(')
            if parms:
                srid = parms
            return "geography::STGeomFromText('%s',%s)" %(obj, srid)
#        else:
#            raise SyntaxError('Invalid field type %s' %fieldtype)
            # NOTE(review): unreachable — the branch above already
            # returned; dead code left from the commented-out else.
            return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
        return BaseAdapter.represent(self, obj, fieldtype)
class MSSQL3Adapter(MSSQLAdapter):
    """ experimental support for pagination in MSSQL"""
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Offset 0 only needs TOP; a real offset is emulated with
        # ROW_NUMBER() in a subselect and a BETWEEN filter outside.
        if limitby:
            (lmin, lmax) = limitby
            if lmin == 0:
                sql_s += ' TOP %i' % lmax
                return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
            # BETWEEN is inclusive; ROW_NUMBER starts at 1.
            lmin += 1
            # Split 'GROUP BY ... ORDER BY ...': text after 'ORDER BY '
            # and the prefix before it.
            # NOTE(review): sql_o_inner is computed but never used below —
            # the ROW_NUMBER() OVER (ORDER BY %s) uses sql_f instead;
            # looks like it should use sql_o_inner. Confirm.
            sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
            sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
            # Alias each selected column f_0, f_1, ... so the outer query
            # can reference the inner select unambiguously.
            sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
            sql_f_inner = [f for f in sql_f.split(',')]
            sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
            sql_f_iproxy = ', '.join(sql_f_iproxy)
            sql_f_oproxy = ', '.join(sql_f_outer)
            return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
    def rowslice(self,rows,minimum=0,maximum=None):
        # Pagination already happened in SQL; never slice client-side.
        return rows
class MSSQL2Adapter(MSSQLAdapter):
    """MSSQL adapter using national (Unicode) character types
    (NVARCHAR/NTEXT) and N'...' string literals."""
    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'json': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def represent(self, obj, fieldtype):
        # Prefix quoted string literals with N so SQL Server treats them
        # as Unicode.
        value = BaseAdapter.represent(self, obj, fieldtype)
        if fieldtype in ('string','text', 'json') and value[:1]=="'":
            value = 'N'+value
        return value

    def execute(self,a):
        # NOTE(review): assumes `a` is a utf8-encoded byte string
        # (Python 2); decoded to unicode before execution. Confirm for
        # any Python 3 port.
        return self.log_execute(a.decode('utf8'))
class SybaseAdapter(MSSQLAdapter):
    """Adapter for Sybase via the `Sybase` driver.

    Reuses MSSQLAdapter's SQL generation (REGEX_DSN and REGEX_URI are
    inherited); only the connection setup differs: a 'sybase:host=...'
    DSN string is built and credentials are passed via driver_args.
    """
    drivers = ('Sybase',)

    types = {
        'boolean': 'BIT',
        'string': 'CHAR VARYING(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR VARYING(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'CHAR VARYING(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }


    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        self.db = db
        self.dbengine = "sybase"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        # BUGFIX: copy driver_args before mutating it below, otherwise
        # .update() writes the credentials into the shared mutable
        # default argument and they leak across adapter instances.
        driver_args = dict(driver_args)
        ruri = uri.split('://',1)[1]
        if '@' not in ruri:
            # DSN form: everything after the scheme names the data source.
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e
        else:
            # user:password@host:port/db form.
            # BUGFIX: match against ruri (scheme already stripped); the
            # original matched the full uri, which the inherited
            # REGEX_URI can never match because of the leading
            # 'sybase://' (compare MSSQLAdapter.__init__).
            m = self.REGEX_URI.match(ruri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            port = m.group('port') or '1433'

            dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)

            # NOTE(review): user/password were already passed through
            # credential_decoder above; the second pass is kept for
            # byte-compatibility with the original behavior.
            driver_args.update(user = credential_decoder(user),
                               password = credential_decoder(password))

        def connector(dsn=dsn,driver_args=driver_args):
            return self.driver.connect(dsn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def integrity_error_class(self):
        # TODO: return the Sybase driver's actual IntegrityError class.
        return RuntimeError # FIX THIS
class FireBirdAdapter(BaseAdapter):
    """Adapter for Firebird/Interbase-family databases.

    Firebird has no autoincrement column: each table gets a generator
    (sequence) plus a BEFORE INSERT trigger that fills in ``id``.
    """
    drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')

    commit_on_alter_table = False
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'json': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'big-id': 'BIGINT PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self,tablename):
        # Name of the generator backing the id column.
        return 'genid_%s' % tablename

    def trigger_name(self,tablename):
        return 'trg_id_%s' % tablename

    def RANDOM(self):
        return 'RAND()'

    def EPOCH(self, first):
        return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])

    def CONTAINING(self,first,second):
        "case in-sensitive like operator"
        return '(%s CONTAINING %s)' % (self.expand(first),
                                       self.expand(second, 'string'))

    def CONTAINS(self, first, second, case_sensitive=False):
        # '%' is doubled to survive later string interpolation; list:*
        # items are wrapped in '|' delimiters (their storage format).
        if first.type in ('string','text'):
            second = str(second).replace('%','%%')
        elif first.type.startswith('list:'):
            second = '|'+str(second).replace('|','||').replace('%','%%')+'|'
        return self.CONTAINING(first,second)

    def _drop(self,table,mode):
        # Dropping a table must also drop its id generator.
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode), 'DROP GENERATOR %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Firebird pagination: FIRST <count> SKIP <offset>.
        if limitby:
            (lmin, lmax) = limitby
            sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self,table,mode = ''):
        # No TRUNCATE in Firebird: delete all rows and reset the generator.
        return ['DELETE FROM %s;' % table._tablename,
                'SET GENERATOR %s TO 0;' % table._sequence_name]

    # user[:password]@host[:port]/db[?set_encoding=charset]
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        port = int(m.group('port') or 3050)
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        charset = m.group('charset') or 'UTF8'
        # Firebird DSN format: host/port:database.
        driver_args.update(dsn='%s/%s:%s' % (host,port,db),
                           user = credential_decoder(user),
                           password = credential_decoder(password),
                           charset = charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def create_sequence_and_triggers(self, query, table, **args):
        # Create the table, its generator, and a trigger that assigns
        # gen_id(...) to new.id when no explicit id was supplied.
        tablename = table._tablename
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('create generator %s;' % sequence_name)
        self.execute('set generator %s to 0;' % sequence_name)
        self.execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;' % (trigger_name, tablename, sequence_name))

    def lastrowid(self,table):
        # gen_id(seq, 0) reads the generator's current value without
        # incrementing it.
        sequence_name = table._sequence_name
        self.execute('SELECT gen_id(%s, 0) FROM rdb$database' % sequence_name)
        return int(self.cursor.fetchone()[0])
class FireBirdEmbeddedAdapter(FireBirdAdapter):
    """Firebird embedded (file-based) variant: the URI carries a filesystem
    path instead of host/port, and the connection uses an empty host."""
    drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')

    # user[:password]@/path/to/db[?set_encoding=charset]
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        pathdb = m.group('path')
        if not pathdb:
            raise SyntaxError('Path required')
        charset = m.group('charset')
        if not charset:
            charset = 'UTF8'
        # Embedded connections use an empty host and the database path.
        host = ''
        driver_args.update(host=host,
                           database=pathdb,
                           user=credential_decoder(user),
                           password=credential_decoder(password),
                           charset=charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
3579
class InformixAdapter(BaseAdapter):
    """Adapter for IBM Informix (9+) via informixdb."""
    drivers = ('informixdb',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'json': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'SERIAL',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'big-id': 'BIGSERIAL',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': 'REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s CONSTRAINT FK_%(table_name)s_%(field_name)s',
        'reference TFK': 'FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s CONSTRAINT TFK_%(table_name)s_%(field_name)s',
        }

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # SKIP/FIRST support depends on the server version, so check the
        # major version reported by the live connection.
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            dbms_version = int(self.connection.dbms_version.split('.')[0])
            if lmin and (dbms_version >= 10):
                # Requires Informix 10.0+
                sql_s += ' SKIP %d' % (lmin, )
            if fetch_amt and (dbms_version >= 9):
                # Requires Informix 9.0+
                sql_s += ' FIRST %d' % (fetch_amt, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        # Informix-specific literal rendering; returning None means "use
        # the generic representation".
        if fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','%%Y-%%m-%%d')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','%%Y-%%m-%%d %%H:%%M:%%S')" % obj
        return None

    # user[:password]@host[:port]/dbname
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "informix"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        user = credential_decoder(user)
        password = credential_decoder(password)
        # informixdb DSN format: database@server.
        dsn = '%s@%s' % (db,host)
        driver_args.update(user=user,password=password,autocommit=True)
        def connector(dsn=dsn,driver_args=driver_args):
            return self.driver.connect(dsn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def execute(self,command):
        # The informixdb driver rejects a trailing statement terminator.
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self,table):
        # sqlerrd[1] holds the SERIAL value of the last insert.
        return self.cursor.sqlerrd[1]

    def integrity_error_class(self):
        return informixdb.IntegrityError
class InformixSEAdapter(InformixAdapter):
    """ work in progress """

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Informix SE has no SKIP/FIRST support: the limitby argument is
        # ignored here and rows are sliced client-side in rowslice().
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def rowslice(self, rows, minimum=0, maximum=None):
        # Emulate limitby by slicing the already-fetched rows in Python.
        return rows[minimum:] if maximum is None else rows[minimum:maximum]
class DB2Adapter(BaseAdapter):
    """DAL adapter for IBM DB2 via pyodbc."""

    drivers = ('pyodbc',)

    # web2py field type -> DB2 column type
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def LEFT_JOIN(self):
        """DB2 spells a left join as LEFT OUTER JOIN."""
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        """DB2's random function."""
        return 'RAND()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Emulate limitby with FETCH FIRST n ROWS ONLY (no offset support:
        only the upper bound of the range is honoured here)."""
        if limitby:
            lower_bound, upper_bound = limitby
            sql_o += ' FETCH FIRST %i ROWS ONLY' % upper_bound
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        """DB2-specific SQL literal forms for blob and datetime values;
        returns None to fall back to the default representation."""
        if fieldtype == 'blob':
            encoded = base64.b64encode(str(obj))
            return "BLOB('%s')" % encoded
        if fieldtype == 'datetime':
            # DB2 timestamp literal: yyyy-mm-dd-hh.mm.ss
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+'-00.00.00'
            return "'%s'" % obj
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Record connection state and build the pyodbc connector closure.
        Everything after 'db2://' is passed verbatim as the ODBC
        connection string."""
        self.db = db
        self.dbengine = "db2"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        connection_string = uri.split('://', 1)[1]
        def connector(cnxn=connection_string, driver_args=driver_args):
            return self.driver.connect(cnxn, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def execute(self, command):
        """Run *command*, stripping one trailing ';' which pyodbc/DB2 rejects."""
        if command.endswith(';'):
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self, table):
        """Fetch the identity value generated by the last insert on *table*."""
        self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
        return int(self.cursor.fetchone()[0])

    def rowslice(self, rows, minimum=0, maximum=None):
        """Apply the lower bound of limitby client-side (the upper bound was
        applied in SQL by select_limitby)."""
        return rows[minimum:] if maximum is None else rows[minimum:maximum]
3794
class TeradataAdapter(BaseAdapter):
    """DAL adapter for Teradata via pyodbc."""

    drivers = ('pyodbc',)

    # web2py field type -> Teradata column type.
    # Teradata does not support ON DELETE, hence the reduced
    # reference/constraint syntax below.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT GENERATED ALWAYS AS IDENTITY',  # Teradata Specific
        'reference': 'INT',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY',  # Teradata Specific
        'big-reference': 'BIGINT',
        'reference FK': ' REFERENCES %(foreign_key)s',
        'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
        }

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Record connection state and build the pyodbc connector closure.
        Everything after 'teradata://' is passed verbatim as the ODBC
        connection string."""
        self.db = db
        self.dbengine = "teradata"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        connection_string = uri.split('://', 1)[1]
        def connector(cnxn=connection_string, driver_args=driver_args):
            return self.driver.connect(cnxn, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    # Similar to MSSQL, Teradata can't specify a range; only the upper
    # bound of limitby is honoured, via TOP n.
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            _, upper_bound = limitby
            sql_s += ' TOP %i' % upper_bound
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self, table, mode=''):
        """Teradata's TRUNCATE equivalent is DELETE ... ALL."""
        return ['DELETE FROM %s ALL;' % (table._tablename)]
3859 3860 INGRES_SEQNAME='ii***lineitemsequence' # NOTE invalid database object name
                                 # (ANSI-SQL wants this form of name
                                 # to be a delimited identifier)

class IngresAdapter(BaseAdapter):
    """DAL adapter for Ingres via pyodbc.

    'id' columns default to values drawn from the shared INGRES_SEQNAME
    sequence placeholder, which create_sequence_and_triggers() replaces
    with a per-table sequence at table-creation time.
    """

    drivers = ('pyodbc',)

    # web2py field type -> Ingres column type
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',  ## Not sure what this contains utf8 or nvarchar. Or even bytes?
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',  ## FIXME utf8 or nvarchar... or blob? what is this type?
        'integer': 'INTEGER4', # or int8...
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'RANDOM()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Ingres supports FIRST n in the select list and (9.2+) OFFSET in
        # the order-by clause, so both bounds of limitby can be honoured.
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            if fetch_amt:
                sql_s += ' FIRST %d ' % (fetch_amt, )
            if lmin:
                # Requires Ingres 9.2+
                sql_o += ' OFFSET %d' % (lmin, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        # Parse an ingres:// URI: either a full ODBC connection string
        # (detected by the presence of '='), or a bare local database name
        # which is expanded to a default local-vnode connection string.
        self.db = db
        self.dbengine = "ingres"
        # NOTE(review): binds the module-level `pyodbc` name directly instead
        # of going through find_driver like sibling adapters -- presumably so
        # integrity_error_class works even without connecting; confirm.
        self._driver = pyodbc
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        connstr = uri.split(':', 1)[1]
        # Simple URI processing
        connstr = connstr.lstrip()
        while connstr.startswith('/'):
            connstr = connstr[1:]
        if '=' in connstr:
            # Assume we have a regular ODBC connection string and just use it
            ruri = connstr
        else:
            # Assume only (local) dbname is passed in with OS auth
            database_name = connstr
            default_driver_name = 'Ingres'
            vnode = '(local)'
            servertype = 'ingres'
            ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
        def connector(cnxn=ruri,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)

        self.connector = connector

        # TODO if version is >= 10, set types['id'] to Identity column, see http://community.actian.com/wiki/Using_Ingres_Identity_Columns
        if do_connect: self.reconnect()

    def create_sequence_and_triggers(self, query, table, **args):
        # post create table auto inc code (if needed)
        # modify table to btree for performance....
        # Older Ingres releases could use rule/trigger like Oracle above.
        if hasattr(table,'_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table.primarykey]))
            self.execute(modify_tbl_sql)
        else:
            # Replace the shared sequence placeholder in the CREATE TABLE
            # statement with a per-table sequence, then create both.
            tmp_seqname='%s_iisq' % table._tablename
            query=query.replace(INGRES_SEQNAME, tmp_seqname)
            self.execute('create sequence %s' % tmp_seqname)
            self.execute(query)
            self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))

    def lastrowid(self,table):
        # Read the current value of the per-table sequence created in
        # create_sequence_and_triggers().
        tmp_seqname='%s_iisq' % table
        self.execute('select current value for %s' % tmp_seqname)
        return int(self.cursor.fetchone()[0]) # don't really need int type cast here...

    def integrity_error_class(self):
        return self._driver.IntegrityError
3971
class IngresUnicodeAdapter(IngresAdapter):
    """Ingres adapter variant mapping text types to Unicode column types
    (NVARCHAR/NCLOB); all behavior is inherited from IngresAdapter."""

    drivers = ('pyodbc',)

    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NCLOB',
        'json': 'NCLOB',
        'password': 'NVARCHAR(%(length)s)',  ## Not sure what this contains utf8 or nvarchar. Or even bytes?
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',  ## FIXME utf8 or nvarchar... or blob? what is this type?
        'integer': 'INTEGER4', # or int8...
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
        'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NCLOB',
        'list:string': 'NCLOB',
        'list:reference': 'NCLOB',
        'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
        }
4003
class SAPDBAdapter(BaseAdapter):
    """DAL adapter for SAP DB / MaxDB via the sapdb driver."""

    drivers = ('sapdb',)

    support_distributed_transaction = False
    # web2py field type -> SAP DB column type
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONG',
        'json': 'LONG',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONG',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'FIXED(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT PRIMARY KEY',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONG',
        'list:string': 'LONG',
        'list:reference': 'LONG',
        'big-id': 'BIGINT PRIMARY KEY',
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self,table):
        # Per-table sequence used for the id column default.
        return '%s_id_Seq' % table

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Emulates limitby by nesting the query and filtering on ROWNO:
        # the inner SELECT is capped at lmax rows, the outer WHERE skips
        # the first lmin rows.
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def create_sequence_and_triggers(self, query, table, **args):
        # following lines should only be executed if table._sequence_name does not exist
        self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
            % (table._tablename, table._id.name, table._sequence_name))
        self.execute(query)

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')


    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        # Parse sapdb://user:password@host/db and build the connector
        # closure for the sapdb driver's Connection() factory.
        self.db = db
        self.dbengine = "sapdb"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        def connector(user=user, password=password, database=db,
                      host=host, driver_args=driver_args):
            return self.driver.Connection(user, password, database,
                                          host, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def lastrowid(self,table):
        # NOTE(review): selecting NEXTVAL advances the sequence rather than
        # reading the last generated value -- confirm this matches how the
        # insert path consumes the sequence on SAP DB.
        self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
        return int(self.cursor.fetchone()[0])
4094
class CubridAdapter(MySQLAdapter):
    """DAL adapter for CUBRID; reuses the MySQL SQL dialect with the
    cubriddb driver."""

    drivers = ('cubriddb',)

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse cubrid://user:password@host:port/db[?set_encoding=...]
        and build the connector closure.

        Fixes: credential_decoder is now applied exactly once (user was
        previously decoded twice, and a second decode of the password was
        computed but never used).
        """
        self.db = db
        self.dbengine = "cubrid"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args, uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://', 1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = int(m.group('port') or '30000')
        # NOTE(review): charset is parsed from the URI but never passed to
        # the driver -- confirm whether set_encoding should be honored.
        charset = m.group('charset') or 'utf8'
        def connector(host=host, port=port, db=db,
                      user=user, passwd=password, driver_args=driver_args):
            return self.driver.connect(host, port, db, user, passwd,
                                       **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # Match the MySQL adapter's session setup.
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4142
######## GAE MySQL ##########

class DatabaseStoredFile:
    """File-like object that stores web2py's migration metadata (.table
    files) in a `web2py_filesystem` database table instead of on disk,
    for platforms without a writable filesystem (e.g. Google SQL).

    NOTE(review): filenames are interpolated directly into SQL strings
    throughout this class (the escape() helper is never applied) -- safe
    only as long as filenames are web2py-generated; confirm.
    """

    # Class-level flag: set once the web2py_filesystem table is known to exist.
    web2py_filesystem = False

    def escape(self,obj):
        # Delegate escaping to the active adapter.
        return self.db._adapter.escape(obj)

    def __init__(self,db,filename,mode):
        # Only engines with a known CREATE TABLE statement below are supported.
        if not db._adapter.dbengine in ('mysql', 'postgres'):
            raise RuntimeError("only MySQL/Postgres can store metadata .table files in database for now")
        self.db = db
        self.filename = filename
        self.mode = mode
        # Lazily create the backing table the first time any instance is made.
        if not self.web2py_filesystem:
            if db._adapter.dbengine == 'mysql':
                sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
            elif db._adapter.dbengine == 'postgres':
                sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
            self.db.executesql(sql)
            DatabaseStoredFile.web2py_filesystem = True
        self.p=0                # current read position within self.data
        self.data = ''
        if mode in ('r','rw','a'):
            # Prefer database content; fall back to an on-disk file if present.
            query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
                % filename
            rows = self.db.executesql(query)
            if rows:
                self.data = rows[0][0]
            elif exists(filename):
                datafile = open(filename, 'r')
                try:
                    self.data = datafile.read()
                finally:
                    datafile.close()
            elif mode in ('r','rw'):
                raise RuntimeError("File %s does not exist" % filename)

    def read(self, bytes):
        # Return up to *bytes* characters from the current position.
        data = self.data[self.p:self.p+bytes]
        self.p += len(data)
        return data

    def readline(self):
        # Return the next line including its trailing newline (or the rest
        # of the buffer if no newline remains).
        i = self.data.find('\n',self.p)+1
        if i>0:
            data, self.p = self.data[self.p:i], i
        else:
            data, self.p = self.data[self.p:], len(self.data)
        return data

    def write(self,data):
        # Writes are buffered in memory until close_connection() flushes them.
        self.data += data

    def close_connection(self):
        # Flush buffered data to the database (delete + insert) and commit;
        # idempotent thanks to the self.db = None guard.
        if self.db is not None:
            self.db.executesql(
                "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
            query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
                % (self.filename, self.data.replace("'","''"))
            self.db.executesql(query)
            self.db.commit()
            self.db = None

    def close(self):
        self.close_connection()

    @staticmethod
    def exists(db, filename):
        # True if the file exists either on disk or in the database table.
        if exists(filename):
            return True
        query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
        if db.executesql(query):
            return True
        return False
4220
class UseDatabaseStoredFile:
    """Mixin overriding BaseAdapter's file hooks so migration metadata is
    kept in the database (via DatabaseStoredFile) rather than on disk."""

    def file_exists(self, filename):
        return DatabaseStoredFile.exists(self.db,filename)

    def file_open(self, filename, mode='rb', lock=True):
        # `lock` is accepted for interface compatibility but ignored:
        # DatabaseStoredFile has no locking.
        return DatabaseStoredFile(self.db,filename,mode)

    def file_close(self, fileobj):
        fileobj.close_connection()

    def file_delete(self,filename):
        # NOTE(review): filename is interpolated into SQL unescaped -- safe
        # only for web2py-generated names; confirm.
        query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
        self.db.executesql(query)
        self.db.commit()
4237
class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
    """DAL adapter for Google Cloud SQL on App Engine: MySQL dialect, with
    .table metadata stored in the database (no writable filesystem)."""

    uploads_in_blob = True

    # google:sql://instance/database
    REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')

    def __init__(self, db, uri='google:sql://realm:domain/database',
                 pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):

        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.db_codec = db_codec
        self._after_connection = after_connection
        # Derive the work folder from the current application's path.
        self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
                os.sep+'applications'+os.sep,1)[1])
        ruri = uri.split("://")[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
        instance = credential_decoder(m.group('instance'))
        self.dbstring = db = credential_decoder(m.group('db'))
        # NOTE(review): this mutates the driver_args dict in place, including
        # the shared mutable default -- confirm whether entries can leak
        # between adapter instances.
        driver_args['instance'] = instance
        if not 'charset' in driver_args:
            driver_args['charset'] = 'utf8'
        self.createdb = createdb = adapter_args.get('createdb',True)
        if not createdb:
            driver_args['database'] = db
        def connector(driver_args=driver_args):
            # `rdbms` is App Engine's Cloud SQL driver module.
            return rdbms.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # Optionally create/select the database, then apply the same
        # session settings the MySQL adapter uses.
        if self.createdb:
            # self.execute('DROP DATABASE %s' % self.dbstring)
            self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
            self.execute('USE %s' % self.dbstring)
        self.execute("SET FOREIGN_KEY_CHECKS=1;")
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def execute(self, command, *a, **b):
        # Python 2: commands are byte strings; decode to unicode for the driver.
        return self.log_execute(command.decode('utf8'), *a, **b)
4283
class NoSQLAdapter(BaseAdapter):
    """Common base for non-relational backends (GAE datastore, CouchDB,
    MongoDB, ...): value coercion lives here, while every SQL-generation
    entry point is stubbed to raise SyntaxError."""

    can_select_for_update = False

    @staticmethod
    def to_unicode(obj):
        # Python 2 semantics: byte strings are decoded as utf8, everything
        # else is coerced through unicode().
        if isinstance(obj, str):
            return obj.decode('utf8')
        elif not isinstance(obj, unicode):
            return unicode(obj)
        return obj

    def id_query(self, table):
        # Query matching every record of *table*.
        return table._id > 0

    def represent(self, obj, fieldtype):
        # Coerce *obj* into the native Python value the NoSQL backend
        # expects for *fieldtype* (no SQL quoting here, unlike BaseAdapter).
        field_is_type = fieldtype.startswith
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            raise SyntaxError("non supported on GAE")
        if self.dbengine == 'google:datastore':
            # Values for raw gae.Property fields pass through untouched.
            if isinstance(fieldtype, gae.Property):
                return obj
        is_string = isinstance(fieldtype,str)
        is_list = is_string and field_is_type('list:')
        if is_list:
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        # Empty string means NULL except for textual field types
        # (string/text/password/upload).
        if obj == '' and not \
                (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
            return None
        if not obj is None:
            if isinstance(obj, list) and not is_list:
                obj = [self.represent(o, fieldtype) for o in obj]
            elif fieldtype in ('integer','bigint','id'):
                obj = long(obj)
            elif fieldtype == 'double':
                obj = float(obj)
            elif is_string and field_is_type('reference'):
                if isinstance(obj, (Row, Reference)):
                    obj = obj['id']
                obj = long(obj)
            elif fieldtype == 'boolean':
                # Anything whose string form does not start with 0/F is True.
                if obj and not str(obj)[0].upper() in '0F':
                    obj = True
                else:
                    obj = False
            elif fieldtype == 'date':
                if not isinstance(obj, datetime.date):
                    (y, m, d) = map(int,str(obj).strip().split('-'))
                    obj = datetime.date(y, m, d)
                elif isinstance(obj,datetime.datetime):
                    # datetime is a subclass of date: truncate to a date.
                    (y, m, d) = (obj.year, obj.month, obj.day)
                    obj = datetime.date(y, m, d)
            elif fieldtype == 'time':
                if not isinstance(obj, datetime.time):
                    time_items = map(int,str(obj).strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    obj = datetime.time(h, mi, s)
            elif fieldtype == 'datetime':
                if not isinstance(obj, datetime.datetime):
                    (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
                    time_items = map(int,str(obj)[11:].strip().split(':')[:3])
                    while len(time_items)<3:
                        time_items.append(0)
                    (h, mi, s) = time_items
                    obj = datetime.datetime(y, m, d, h, mi, s)
            elif fieldtype == 'blob':
                pass
            elif fieldtype == 'json':
                obj = self.to_unicode(obj)
                if have_serializers:
                    obj = serializers.loads_json(obj)
                elif simplejson:
                    obj = simplejson.loads(obj)
                else:
                    raise RuntimeError("missing simplejson")
            elif is_string and field_is_type('list:string'):
                return map(self.to_unicode,obj)
            elif is_list:
                return map(int,obj)
            else:
                obj = self.to_unicode(obj)
        return obj

    # The _insert/_count/_select/_delete/_update methods only provide a
    # human-readable representation for db._lastsql-style logging.
    def _insert(self,table,fields):
        return 'insert %s in %s' % (fields, table)

    def _count(self,query,distinct=None):
        return 'count %s' % repr(query)

    def _select(self,query,fields,attributes):
        return 'select %s where %s' % (repr(fields), repr(query))

    def _delete(self,tablename, query):
        return 'delete %s where %s' % (repr(tablename),repr(query))

    def _update(self,tablename,query,fields):
        return 'update %s (%s) where %s' % (repr(tablename),
                                            repr(fields),repr(query))

    def commit(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def rollback(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def close_connection(self):
        """
        remember: no transactions on many NoSQL
        """
        pass


    # these functions should never be called!
    def OR(self,first,second): raise SyntaxError("Not supported")
    def AND(self,first,second): raise SyntaxError("Not supported")
    def AS(self,first,second): raise SyntaxError("Not supported")
    def ON(self,first,second): raise SyntaxError("Not supported")
    def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
    def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
    def ADD(self,first,second): raise SyntaxError("Not supported")
    def SUB(self,first,second): raise SyntaxError("Not supported")
    def MUL(self,first,second): raise SyntaxError("Not supported")
    def DIV(self,first,second): raise SyntaxError("Not supported")
    def LOWER(self,first): raise SyntaxError("Not supported")
    def UPPER(self,first): raise SyntaxError("Not supported")
    def EXTRACT(self,first,what): raise SyntaxError("Not supported")
    def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
    def LEFT_JOIN(self): raise SyntaxError("Not supported")
    def RANDOM(self): raise SyntaxError("Not supported")
    def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
    def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
    def ILIKE(self,first,second): raise SyntaxError("Not supported")
    def drop(self,table,mode): raise SyntaxError("Not supported")
    def alias(self,table,alias): raise SyntaxError("Not supported")
    def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
    def distributed_transaction_begin(self,key): raise SyntaxError("Not supported")
    def prepare(self,key): raise SyntaxError("Not supported")
    def commit_prepared(self,key): raise SyntaxError("Not supported")
    def rollback_prepared(self,key): raise SyntaxError("Not supported")
    def concat_add(self,table): raise SyntaxError("Not supported")
    def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
    def create_sequence_and_triggers(self, query, table, **args): pass
    def log_execute(self,*a,**b): raise SyntaxError("Not supported")
    def execute(self,*a,**b): raise SyntaxError("Not supported")
    def represent_exceptions(self, obj, fieldtype): raise SyntaxError("Not supported")
    def lastrowid(self,table): raise SyntaxError("Not supported")
    def integrity_error_class(self): raise SyntaxError("Not supported")
    def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
4447
class GAEF(object):
    """One Google Datastore filter: field name, operator, value, plus a
    Python-side predicate used when the datastore cannot apply it."""

    def __init__(self, name, op, value, apply):
        # The datastore addresses the primary key as '__key__', not 'id'.
        self.name = '__key__' if name == 'id' else name
        self.op = op
        self.value = value
        self.apply = apply

    def __repr__(self):
        return '({0} {1} {2}:{3})'.format(
            self.name, self.op, repr(self.value), type(self.value))
4457
4458 -class GoogleDatastoreAdapter(NoSQLAdapter):
4459 uploads_in_blob = True 4460 types = {} 4461
    # The datastore has no filesystem-backed migration metadata, so the
    # file hooks inherited from BaseAdapter become no-ops.
    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj): pass
4465 4466 REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)') 4467
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Map web2py field types onto GAE datastore property classes and
        select the datastore namespace from the URI, if any.

        There is no connection/pool to set up: the datastore API is ambient,
        so pool_size is forced to 0.
        """
        # NOTE(review): self.types is declared at class level, so this
        # update() mutates the dict shared by all instances -- presumably
        # harmless since the mapping is constant, but confirm.
        self.types.update({
                'boolean': gae.BooleanProperty,
                'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
                'text': gae.TextProperty,
                'json': gae.TextProperty,
                'password': gae.StringProperty,
                'blob': gae.BlobProperty,
                'upload': gae.StringProperty,
                'integer': gae.IntegerProperty,
                'bigint': gae.IntegerProperty,
                'float': gae.FloatProperty,
                'double': gae.FloatProperty,
                'decimal': GAEDecimalProperty,
                'date': gae.DateProperty,
                'time': gae.TimeProperty,
                'datetime': gae.DateTimeProperty,
                'id': None,
                'reference': gae.IntegerProperty,
                'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
                'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
                'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
                })
        self.db = db
        self.uri = uri
        self.dbengine = 'google:datastore'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = 0
        # google:datastore://<namespace> selects a datastore namespace.
        match = self.REGEX_NAMESPACE.match(uri)
        if match:
            namespace_manager.set_namespace(match.group('namespace'))
4504
    def parse_id(self, value, field_type):
        # Datastore ids are already usable as-is; no int() coercion needed.
        return value
4507
    def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
        """Synthesize a gae.Model subclass for *table* and attach it as
        table._tableobj; nothing is persisted (the datastore is schemaless),
        so migrate/fake_migrate are accepted but unused here.

        polymodel may be None (plain Model), True (PolyModel base), or a
        parent Table whose _tableobj becomes the base class.
        """
        myfields = {}
        for field in table:
            # Fields already defined on the parent polymodel are inherited.
            if isinstance(polymodel,Table) and field.name in polymodel.fields():
                continue
            attr = {}
            if isinstance(field.custom_qualifier, dict):
                #this is custom properties to add to the GAE field declartion
                attr = field.custom_qualifier
            field_type = field.type
            if isinstance(field_type, SQLCustomType):
                ftype = self.types[field_type.native or field_type.type](**attr)
            elif isinstance(field_type, gae.Property):
                # A raw GAE property instance is used verbatim.
                ftype = field_type
            elif field_type.startswith('id'):
                # The datastore key takes the place of the id column.
                continue
            elif field_type.startswith('decimal'):
                precision, scale = field_type[7:].strip('()').split(',')
                precision = int(precision)
                scale = int(scale)
                ftype = GAEDecimalProperty(precision, scale, **attr)
            elif field_type.startswith('reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field_type[10:].strip()
                ftype = self.types[field_type[:9]](referenced, **attr)
            elif field_type.startswith('list:reference'):
                if field.notnull:
                    attr['required'] = True
                referenced = field_type[15:].strip()
                ftype = self.types[field_type[:14]](**attr)
            elif field_type.startswith('list:'):
                ftype = self.types[field_type](**attr)
            elif not field_type in self.types\
                 or not self.types[field_type]:
                raise SyntaxError('Field: unknown field type: %s' % field_type)
            else:
                ftype = self.types[field_type](**attr)
            myfields[field.name] = ftype
        # Build the model class dynamically with the chosen base.
        if not polymodel:
            table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
        elif polymodel==True:
            table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
        elif isinstance(polymodel,Table):
            table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
        else:
            raise SyntaxError("polymodel must be None, True, a table or a tablename")
        return None
4556
4557 - def expand(self,expression,field_type=None):
4558 if isinstance(expression,Field): 4559 if expression.type in ('text', 'blob', 'json'): 4560 raise SyntaxError('AppEngine does not index by: %s' % expression.type) 4561 return expression.name 4562 elif isinstance(expression, (Expression, Query)): 4563 if not expression.second is None: 4564 return expression.op(expression.first, expression.second) 4565 elif not expression.first is None: 4566 return expression.op(expression.first) 4567 else: 4568 return expression.op() 4569 elif field_type: 4570 return self.represent(expression,field_type) 4571 elif isinstance(expression,(list,tuple)): 4572 return ','.join([self.represent(item,field_type) for item in expression]) 4573 else: 4574 return str(expression)
4575 4576 ### TODO from gql.py Expression
4577 - def AND(self,first,second):
4578 a = self.expand(first) 4579 b = self.expand(second) 4580 if b[0].name=='__key__' and a[0].name!='__key__': 4581 return b+a 4582 return a+b
4583
4584 - def EQ(self,first,second=None):
4585 if isinstance(second, Key): 4586 return [GAEF(first.name,'=',second,lambda a,b:a==b)] 4587 return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
4588
4589 - def NE(self,first,second=None):
4590 if first.type != 'id': 4591 return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)] 4592 else: 4593 if not second is None: 4594 second = Key.from_path(first._tablename, long(second)) 4595 return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
4596
4597 - def LT(self,first,second=None):
4598 if first.type != 'id': 4599 return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)] 4600 else: 4601 second = Key.from_path(first._tablename, long(second)) 4602 return [GAEF(first.name,'<',second,lambda a,b:a<b)]
4603
4604 - def LE(self,first,second=None):
4605 if first.type != 'id': 4606 return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)] 4607 else: 4608 second = Key.from_path(first._tablename, long(second)) 4609 return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
4610
4611 - def GT(self,first,second=None):
4612 if first.type != 'id' or second==0 or second == '0': 4613 return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)] 4614 else: 4615 second = Key.from_path(first._tablename, long(second)) 4616 return [GAEF(first.name,'>',second,lambda a,b:a>b)]
4617
4618 - def GE(self,first,second=None):
4619 if first.type != 'id': 4620 return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)] 4621 else: 4622 second = Key.from_path(first._tablename, long(second)) 4623 return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
4624
4625 - def INVERT(self,first):
4626 return '-%s' % first.name
4627
4628 - def COMMA(self,first,second):
4629 return '%s, %s' % (self.expand(first),self.expand(second))
4630
4631 - def BELONGS(self,first,second=None):
4632 if not isinstance(second,(list, tuple)): 4633 raise SyntaxError("Not supported") 4634 if first.type != 'id': 4635 return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)] 4636 else: 4637 second = [Key.from_path(first._tablename, int(i)) for i in second] 4638 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
4639
4640 - def CONTAINS(self,first,second,case_sensitive=False):
4641 # silently ignoring: GAE can only do case sensitive matches! 4642 if not first.type.startswith('list:'): 4643 raise SyntaxError("Not supported") 4644 return [GAEF(first.name,'=',self.expand(second,first.type[5:]),lambda a,b:b in a)]
4645
4646 - def NOT(self,first):
4647 nops = { self.EQ: self.NE, 4648 self.NE: self.EQ, 4649 self.LT: self.GE, 4650 self.GT: self.LE, 4651 self.LE: self.GT, 4652 self.GE: self.LT} 4653 if not isinstance(first,Query): 4654 raise SyntaxError("Not suported") 4655 nop = nops.get(first.op,None) 4656 if not nop: 4657 raise SyntaxError("Not suported %s" % first.op.__name__) 4658 first.op = nop 4659 return self.expand(first)
4660
4661 - def truncate(self,table,mode):
4662 self.db(self.db._adapter.id_query(table)).delete()
4663
4664 - def select_raw(self,query,fields=None,attributes=None):
4665 db = self.db 4666 fields = fields or [] 4667 attributes = attributes or {} 4668 args_get = attributes.get 4669 new_fields = [] 4670 for item in fields: 4671 if isinstance(item,SQLALL): 4672 new_fields += item._table 4673 else: 4674 new_fields.append(item) 4675 fields = new_fields 4676 if query: 4677 tablename = self.get_table(query) 4678 elif fields: 4679 tablename = fields[0].tablename 4680 query = db._adapter.id_query(fields[0].table) 4681 else: 4682 raise SyntaxError("Unable to determine a tablename") 4683 4684 if query: 4685 if use_common_filters(query): 4686 query = self.common_filter(query,[tablename]) 4687 4688 #tableobj is a GAE Model class (or subclass) 4689 tableobj = db[tablename]._tableobj 4690 filters = self.expand(query) 4691 4692 projection = None 4693 if len(db[tablename].fields) == len(fields): 4694 #getting all fields, not a projection query 4695 projection = None 4696 elif args_get('projection') == True: 4697 projection = [] 4698 for f in fields: 4699 if f.type in ['text', 'blob', 'json']: 4700 raise SyntaxError( 4701 "text and blob field types not allowed in projection queries") 4702 else: 4703 projection.append(f.name) 4704 4705 # projection's can't include 'id'. 
4706 # it will be added to the result later 4707 query_projection = [ 4708 p for p in projection if \ 4709 p != db[tablename]._id.name] if projection \ 4710 else None 4711 4712 cursor = None 4713 if isinstance(args_get('reusecursor'), str): 4714 cursor = args_get('reusecursor') 4715 items = gae.Query(tableobj, projection=query_projection, 4716 cursor=cursor) 4717 4718 for filter in filters: 4719 if args_get('projection') == True and \ 4720 filter.name in query_projection and \ 4721 filter.op in ['=', '<=', '>=']: 4722 raise SyntaxError( 4723 "projection fields cannot have equality filters") 4724 if filter.name=='__key__' and filter.op=='>' and filter.value==0: 4725 continue 4726 elif filter.name=='__key__' and filter.op=='=': 4727 if filter.value==0: 4728 items = [] 4729 elif isinstance(filter.value, Key): 4730 # key qeuries return a class instance, 4731 # can't use projection 4732 # extra values will be ignored in post-processing later 4733 item = tableobj.get(filter.value) 4734 items = (item and [item]) or [] 4735 else: 4736 # key qeuries return a class instance, 4737 # can't use projection 4738 # extra values will be ignored in post-processing later 4739 item = tableobj.get_by_id(filter.value) 4740 items = (item and [item]) or [] 4741 elif isinstance(items,list): # i.e. there is a single record! 4742 items = [i for i in items if filter.apply( 4743 getattr(item,filter.name),filter.value)] 4744 else: 4745 if filter.name=='__key__' and filter.op != 'in': 4746 items.order('__key__') 4747 items = items.filter('%s %s' % (filter.name,filter.op), 4748 filter.value) 4749 if not isinstance(items,list): 4750 if args_get('left', None): 4751 raise SyntaxError('Set: no left join in appengine') 4752 if args_get('groupby', None): 4753 raise SyntaxError('Set: no groupby in appengine') 4754 orderby = args_get('orderby', False) 4755 if orderby: 4756 ### THIS REALLY NEEDS IMPROVEMENT !!! 
4757 if isinstance(orderby, (list, tuple)): 4758 orderby = xorify(orderby) 4759 if isinstance(orderby,Expression): 4760 orderby = self.expand(orderby) 4761 orders = orderby.split(', ') 4762 for order in orders: 4763 order={'-id':'-__key__','id':'__key__'}.get(order,order) 4764 items = items.order(order) 4765 if args_get('limitby', None): 4766 (lmin, lmax) = attributes['limitby'] 4767 (limit, offset) = (lmax - lmin, lmin) 4768 rows = items.fetch(limit,offset=offset) 4769 #cursor is only useful if there was a limit and we didn't return 4770 # all results 4771 if args_get('reusecursor'): 4772 db['_lastcursor'] = items.cursor() 4773 items = rows 4774 return (items, tablename, projection or db[tablename].fields)
4775
    def select(self,query,fields,attributes):
        """
        This is the GAE version of select.  some notes to consider:
         - db['_lastsql'] is not set because there is no SQL statement
           string for a GAE query
         - 'nativeRef' is a magical fieldname used for self references
           on GAE
         - optional attribute 'projection' when set to True will trigger
           use of the GAE projection queries.  note that there are rules
           for what is accepted imposed by GAE: each field must be
           indexed, projection queries cannot contain blob or text
           fields, and you cannot use == and also select that same
           field.  see https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
         - optional attribute 'reusecursor' allows use of cursor with
           queries that have the limitby attribute.  Set the attribute
           to True for the first query, set it to the value of
           db['_lastcursor'] to continue a previous query.  The user
           must save the cursor value between requests, and the filters
           must be identical.  It is up to the user to follow google's
           limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
        """

        (items, tablename, fields) = self.select_raw(query,fields,attributes)
        # self.db['_lastsql'] = self._select(query,fields,attributes)
        # build each row in the requested field order; for the id column
        # and 'nativeRef' the entity itself is used (truthiness chain:
        # the entity is a model instance, always truthy)
        rows = [[(t==self.db[tablename]._id.name and item) or \
                 (t=='nativeRef' and item) or getattr(item, t) \
                 for t in fields] for item in items]
        colnames = ['%s.%s' % (tablename, t) for t in fields]
        processor = attributes.get('processor',self.parse)
        return processor(rows,fields,colnames,False)
4803
4804 - def count(self,query,distinct=None,limit=None):
4805 if distinct: 4806 raise RuntimeError("COUNT DISTINCT not supported") 4807 (items, tablename, fields) = self.select_raw(query) 4808 # self.db['_lastsql'] = self._count(query) 4809 try: 4810 return len(items) 4811 except TypeError: 4812 return items.count(limit=limit)
4813
    def delete(self,tablename, query):
        """
        Delete all records matching `query`; returns how many were
        deleted.

        This function was changed on 2010-05-04 because according to
        http://code.google.com/p/googleappengine/issues/detail?id=3119
        GAE no longer supports deleting more than 1000 records.
        """
        # self.db['_lastsql'] = self._delete(tablename,query)
        (items, tablename, fields) = self.select_raw(query)
        # items can be one item or a query
        if not isinstance(items,list):
            # use a keys_only query to ensure that this runs as a
            # datastore small operation; delete in batches of 1000
            # until no keys remain
            leftitems = items.fetch(1000, keys_only=True)
            counter = 0
            while len(leftitems):
                counter += len(leftitems)
                gae.delete(leftitems)
                leftitems = items.fetch(1000, keys_only=True)
        else:
            counter = len(items)
            gae.delete(items)
        return counter
4836
4837 - def update(self,tablename,query,update_fields):
4838 # self.db['_lastsql'] = self._update(tablename,query,update_fields) 4839 (items, tablename, fields) = self.select_raw(query) 4840 counter = 0 4841 for item in items: 4842 for field, value in update_fields: 4843 setattr(item, field.name, self.represent(value,field.type)) 4844 item.put() 4845 counter += 1 4846 LOGGER.info(str(counter)) 4847 return counter
4848
4849 - def insert(self,table,fields):
4850 dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields) 4851 # table._db['_lastsql'] = self._insert(table,fields) 4852 tmp = table._tableobj(**dfields) 4853 tmp.put() 4854 rid = Reference(tmp.key().id()) 4855 (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key()) 4856 return rid
4857
4858 - def bulk_insert(self,table,items):
4859 parsed_items = [] 4860 for item in items: 4861 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item) 4862 parsed_items.append(table._tableobj(**dfields)) 4863 gae.put(parsed_items) 4864 return True
4865
def uuid2int(uuidv):
    """Map a UUID string to its 128-bit integer value."""
    parsed = uuid.UUID(uuidv)
    return parsed.int
4868
def int2uuid(n):
    """Inverse of uuid2int: format a 128-bit integer as a UUID string."""
    rebuilt = uuid.UUID(int=n)
    return str(rebuilt)
4871
class CouchDBAdapter(NoSQLAdapter):
    """DAL adapter for CouchDB (experimental).

    Queries are translated into javascript map functions executed
    server-side through couchdb temporary views (see _select).
    """
    drivers = ('couchdb',)

    # uploaded files are stored inside blob fields rather than on disk
    uploads_in_blob = True
    # map web2py field types to the python types stored in couchdb
    types = {
                'boolean': bool,
                'string': str,
                'text': str,
                'json': str,
                'password': str,
                'blob': str,
                'upload': str,
                'integer': long,
                'bigint': long,
                'float': float,
                'double': float,
                'date': datetime.date,
                'time': datetime.time,
                'datetime': datetime.datetime,
                'id': long,
                'reference': long,
                'list:string': list,
                'list:integer': list,
                'list:reference': list,
        }

    # no filesystem-based migration metadata for couchdb:
    # all three file hooks are no-ops
    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj): pass

    def expand(self,expression,field_type=None):
        # the couchdb primary key is '_id', not 'id'
        if isinstance(expression,Field):
            if expression.type=='id':
                return "%s._id" % expression.tablename
        return BaseAdapter.expand(self,expression,field_type)

    def AND(self,first,second):
        # javascript boolean operators, evaluated inside the map function
        return '(%s && %s)' % (self.expand(first),self.expand(second))

    def OR(self,first,second):
        return '(%s || %s)' % (self.expand(first),self.expand(second))

    def EQ(self,first,second):
        # None maps to javascript null
        if second is None:
            return '(%s == null)' % self.expand(first)
        return '(%s == %s)' % (self.expand(first),self.expand(second,first.type))

    def NE(self,first,second):
        if second is None:
            return '(%s != null)' % self.expand(first)
        return '(%s != %s)' % (self.expand(first),self.expand(second,first.type))

    def COMMA(self,first,second):
        # javascript concatenation of the two expansions
        return '%s + %s' % (self.expand(first),self.expand(second))

    def represent(self, obj, fieldtype):
        """Serialize `obj` as the javascript/JSON literal embedded in the
        generated map function."""
        value = NoSQLAdapter.represent(self, obj, fieldtype)
        if fieldtype=='id':
            return repr(str(int(value)))
        elif fieldtype in ('date','time','datetime','boolean'):
            return serializers.json(value)
        # py2: unicode values are utf8-encoded before repr()
        return repr(not isinstance(value,unicode) and value \
                        or value and value.encode('utf8'))

    def __init__(self,db,uri='couchdb://127.0.0.1:5984',
                 pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Connect to the couchdb server addressed by `uri`
        (couchdb://host:port)."""
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.dbengine = 'couchdb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = pool_size

        # strip the 'couchdb://' scheme and talk plain http
        url='http://'+uri[10:]
        def connector(url=url,driver_args=driver_args):
            return self.driver.Server(url,**driver_args)
        self.reconnect(connector,cursor=False)

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        # best-effort: creating an already-existing database raises,
        # which is silently ignored
        if migrate:
            try:
                self.connection.create(table._tablename)
            except:
                pass

    def insert(self,table,fields):
        """Insert one document; the integer form of a fresh UUID is used
        as the document _id and returned."""
        id = uuid2int(web2py_uuid())
        ctable = self.connection[table._tablename]
        values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
        values['_id'] = str(id)
        ctable.save(values)
        return id

    def _select(self,query,fields,attributes):
        """Build the couchdb temporary-view map function and the column
        names for `query`; returns (fn, colnames)."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        new_fields=[]
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            else:
                new_fields.append(item)
        # helpers mapping the DAL 'id' field onto the couchdb '_id' key
        def uid(fd):
            return fd=='id' and '_id' or fd
        def get(row,fd):
            return fd=='id' and int(row['_id']) or row.get(fd,None)
        fields = new_fields
        tablename = self.get_table(query)
        fieldnames = [f.name for f in (fields or self.db[tablename])]
        colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
        fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
        # the map function emits (doc._id, [selected fields]) for every
        # document satisfying the expanded query
        fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
            dict(t=tablename,
                 query=self.expand(query),
                 order='%s._id' % tablename,
                 fields=fields)
        return fn, colnames

    def select(self,query,fields,attributes):
        """Execute the generated map function as a temporary view and
        parse the emitted values into rows."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        fn, colnames = self._select(query,fields,attributes)
        tablename = colnames[0].split('.')[0]
        ctable = self.connection[tablename]
        rows = [cols['value'] for cols in ctable.query(fn)]
        processor = attributes.get('processor',self.parse)
        return processor(rows,fields,colnames,False)

    def delete(self,tablename,query):
        """Delete matching documents; returns how many were removed."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        if query.first.type=='id' and query.op==self.EQ:
            # fast path: delete a single document by its _id
            id = query.second
            tablename = query.first.tablename
            # NOTE(review): this assert is tautological (compares
            # tablename to the value it was just assigned) — presumably
            # meant to check against self.get_table(query); confirm
            assert(tablename == query.first.tablename)
            ctable = self.connection[tablename]
            try:
                del ctable[str(id)]
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            # general path: select the matching ids, then delete each
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            for row in rows:
                del ctable[str(row.id)]
            return len(rows)

    def update(self,tablename,query,fields):
        """Update matching documents with the (Field, value) pairs in
        `fields`; returns how many were updated."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        if query.first.type=='id' and query.op==self.EQ:
            # fast path: update a single document by its _id
            id = query.second
            tablename = query.first.tablename
            ctable = self.connection[tablename]
            try:
                doc = ctable[str(id)]
                for key,value in fields:
                    doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
                ctable.save(doc)
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            # general path: select the matching ids, then update each
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            table = self.db[tablename]
            for row in rows:
                doc = ctable[str(row.id)]
                for key,value in fields:
                    doc[key.name] = self.represent(value,table[key.name].type)
                ctable.save(doc)
            return len(rows)

    def count(self,query,distinct=None):
        """Count matching documents by selecting their ids (couchdb has
        no server-side count here).  COUNT DISTINCT is unsupported."""
        if distinct:
            raise RuntimeError("COUNT DISTINCT not supported")
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        tablename = self.get_table(query)
        rows = self.select(query,[self.db[tablename]._id],{})
        return len(rows)
5063
def cleanup(text):
    """
    validates that the given text is clean: only contains [0-9a-zA-Z_]
    """
    if REGEX_ALPHANUMERIC.match(text):
        return text
    raise SyntaxError('invalid table or field name: %s' % text)
5071
class MongoDBAdapter(NoSQLAdapter):
    """DAL adapter for MongoDB via the pymongo driver (in progress)."""
    # mongodb documents map directly onto JSON
    native_json = True
    drivers = ('pymongo',)

    # uploaded files are stored in blob fields rather than on disk
    uploads_in_blob = True

    # map web2py field types to the python types stored by pymongo
    types = {
                'boolean': bool,
                'string': str,
                'text': str,
                'json': str,
                'password': str,
                'blob': str,
                'upload': str,
                'integer': long,
                'bigint': long,
                'float': float,
                'double': float,
                'date': datetime.date,
                'time': datetime.time,
                'datetime': datetime.datetime,
                'id': long,
                'reference': long,
                'list:string': list,
                'list:integer': list,
                'list:reference': list,
        }

    error_messages = {"javascript_needed": "This must yet be replaced" +
                      " with javascript in order to work."}
    def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
                 pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Connect to the mongodb database named in `uri`.

        adapter_args may carry:
          - 'minimumreplication': replicates to wait for on insert/update
          - 'safe': whether operations are synchronous by default
        """
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        import random
        from bson.objectid import ObjectId
        from bson.son import SON
        import pymongo.uri_parser

        m = pymongo.uri_parser.parse_uri(uri)

        # keep references to driver helpers on the adapter instance
        self.SON = SON
        self.ObjectId = ObjectId
        self.random = random

        self.dbengine = 'mongodb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = pool_size
        # this is the minimum amount of replicates that it should wait
        # for on insert/update
        self.minimumreplication = adapter_args.get('minimumreplication',0)
        # historically all inserts and selects were performed
        # asynchronously, but now the default is synchronous, except when
        # overruled by either this adapter_args default or a function
        # parameter
        self.safe = adapter_args.get('safe',True)

        # older pymongo versions returned a tuple from parse_uri
        if isinstance(m,tuple):
            m = {"database" : m[1]}
        if m.get('database')==None:
            raise SyntaxError("Database is required!")
        def connector(uri=self.uri,m=m):
            try:
                # Connection() is deprecated in favour of MongoClient
                if hasattr(self.driver, "MongoClient"):
                    Connection = self.driver.MongoClient
                else:
                    Connection = self.driver.Connection
                return Connection(uri)[m.get('database')]
            except self.driver.errors.ConnectionFailure:
                inst = sys.exc_info()[1]
                raise SyntaxError("The connection to " +
                                  uri + " could not be made")

        self.reconnect(connector,cursor=False)
5155
    def object_id(self, arg=None):
        """ Convert input to a valid Mongodb ObjectId instance

        self.object_id("<random>") -> ObjectId (not unique) instance

        Accepts an existing ObjectId (returned unchanged), an integer,
        a decimal or hexadecimal string, or the literal "<random>".
        """
        if not arg:
            arg = 0
        if isinstance(arg, basestring):
            # we assume an integer as default input
            rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
            if arg.isdigit() and (not rawhex):
                arg = int(arg)
            elif arg == "<random>":
                # build 24 random hex digits (not guaranteed unique)
                arg = int("0x%sL" % \
                          "".join([self.random.choice("0123456789abcdef") \
                                   for x in range(24)]), 0)
            elif arg.isalnum():
                # treat as a hexadecimal string, adding 0x if missing
                if not arg.startswith("0x"):
                    arg = "0x%s" % arg
                try:
                    arg = int(arg, 0)
                except ValueError, e:
                    raise ValueError(
                            "invalid objectid argument string: %s" % e)
            else:
                raise ValueError("Invalid objectid argument string. " +
                                 "Requires an integer or base 16 value")
        elif isinstance(arg, self.ObjectId):
            return arg
        if not isinstance(arg, (int, long)):
            raise TypeError("object_id argument must be of type " +
                            "ObjectId or an objectid representable integer")
        # render the integer as the 24-hex-digit form ObjectId expects
        if arg == 0:
            hexvalue = "".zfill(24)
        else:
            hexvalue = hex(arg)[2:].replace("L", "")
        return self.ObjectId(hexvalue)
5192
5193 - def represent(self, obj, fieldtype):
5194 value = NoSQLAdapter.represent(self, obj, fieldtype) 5195 if fieldtype =='date': 5196 if value == None: 5197 return value 5198 # this piece of data can be stripped off based on the fieldtype 5199 t = datetime.time(0, 0, 0) 5200 # mongodb doesn't has a date object and so it must datetime, 5201 # string or integer 5202 return datetime.datetime.combine(value, t) 5203 elif fieldtype == 'time': 5204 if value == None: 5205 return value 5206 # this piece of data can be stripped of based on the fieldtype 5207 d = datetime.date(2000, 1, 1) 5208 # mongodb doesn't has a time object and so it must datetime, 5209 # string or integer 5210 return datetime.datetime.combine(d, value) 5211 elif fieldtype == 'list:string' or \ 5212 fieldtype == 'list:integer' or \ 5213 fieldtype == 'list:reference': 5214 return value 5215 return value
5216 5217 # Safe determines whether a asynchronious request is done or a 5218 # synchronious action is done 5219 # For safety, we use by default synchronious requests
5220 - def insert(self, table, fields, safe=None):
5221 if safe==None: 5222 safe = self.safe 5223 ctable = self.connection[table._tablename] 5224 values = dict() 5225 for k, v in fields: 5226 if not k.name in ["id", "safe"]: 5227 fieldname = k.name 5228 fieldtype = table[k.name].type 5229 if ("reference" in fieldtype) or (fieldtype=="id"): 5230 values[fieldname] = self.object_id(v) 5231 else: 5232 values[fieldname] = self.represent(v, fieldtype) 5233 ctable.insert(values, safe=safe) 5234 return int(str(values['_id']), 16)
5235
5236 - def create_table(self, table, migrate=True, fake_migrate=False, 5237 polymodel=None, isCapped=False):
5238 if isCapped: 5239 raise RuntimeError("Not implemented")
5240
5241 - def count(self, query, distinct=None, snapshot=True):
5242 if distinct: 5243 raise RuntimeError("COUNT DISTINCT not supported") 5244 if not isinstance(query,Query): 5245 raise SyntaxError("Not Supported") 5246 tablename = self.get_table(query) 5247 return int(self.select(query,[self.db[tablename]._id], {}, 5248 count=True,snapshot=snapshot)['count'])
5249 # Maybe it would be faster if we just implemented the pymongo 5250 # .count() function which is probably quicker? 5251 # therefor call __select() connection[table].find(query).count() 5252 # Since this will probably reduce the return set? 5253
    def expand(self, expression, field_type=None):
        """Convert a DAL expression into the query document / field name /
        literal that pymongo expects."""
        if isinstance(expression, Query):
            # any query using 'id':=
            #    set name as _id (as per pymongo/mongodb primary key)
            #    convert second arg to an objectid field
            #    (if its not already)
            #    if second arg is 0 convert to objectid
            if isinstance(expression.first,Field) and \
                    ((expression.first.type == 'id') or \
                    ("reference" in expression.first.type)):
                if expression.first.type == 'id':
                    expression.first.name = '_id'
                # cast to Mongo ObjectId
                expression.second = self.object_id(expression.second)
                # NOTE(review): this result is overwritten by the generic
                # Expression/Query branch below, so expression.op runs
                # twice for id/reference queries — confirm intended
                result = expression.op(expression.first, expression.second)
        if isinstance(expression, Field):
            # the mongodb primary key is '_id', not 'id'
            if expression.type=='id':
                result = "_id"
            else:
                result = expression.name

        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                result = expression.op(expression.first, expression.second)
            elif not expression.first is None:
                result = expression.op(expression.first)
            elif not isinstance(expression.op, str):
                result = expression.op()
            else:
                result = expression.op
        elif field_type:
            result = str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            result = ','.join(self.represent(item,field_type) for
                              item in expression)
        else:
            result = expression
        return result
5292
    def _select(self, query, fields, attributes):
        """Translate query/fields/attributes into the pieces of a pymongo
        find() call; returns (tablename, query dict, fields dict,
        sort list, limit, skip)."""
        if 'for_update' in attributes:
            logging.warn('mongodb does not support for_update')
        for key in set(attributes.keys())-set(('limitby',
                                               'orderby','for_update')):
            if attributes[key]!=None:
                logging.warn('select attribute not implemented: %s' % key)

        new_fields=[]
        mongosort_list = []

        # try an orderby attribute
        orderby = attributes.get('orderby', False)
        limitby = attributes.get('limitby', False)
        # distinct = attributes.get('distinct', False)
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)

            # !!!! need to add 'random'
            # a leading '-' selects descending order for that field
            for f in self.expand(orderby).split(','):
                if f.startswith('-'):
                    mongosort_list.append((f[1:], -1))
                else:
                    mongosort_list.append((f, 1))

        if limitby:
            # NOTE(review): limitby is (min, max); 'limit' here receives
            # max rather than max-min — confirm this is intended
            limitby_skip, limitby_limit = limitby
        else:
            limitby_skip = limitby_limit = 0

        mongofields_dict = self.SON()
        mongoqry_dict = {}
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item._table
            else:
                new_fields.append(item)
        fields = new_fields
        if isinstance(query,Query):
            tablename = self.get_table(query)
        elif len(fields) != 0:
            tablename = fields[0].tablename
        else:
            raise SyntaxError("The table name could not be found in " +
                              "the query nor from the select statement.")

        mongoqry_dict = self.expand(query)
        fields = fields or self.db[tablename]
        for field in fields:
            mongofields_dict[field.name] = 1

        return tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
            limitby_limit, limitby_skip
5347 5348
    def select(self, query, fields, attributes, count=False,
               snapshot=False):
        """Execute a mongodb find().

        When count=True, returns {'count': n} instead of rows; snapshot
        is passed through to pymongo.
        """
        # TODO: support joins
        tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
        limitby_limit, limitby_skip = self._select(query, fields, attributes)
        ctable = self.connection[tablename]

        if count:
            return {'count' : ctable.find(
                    mongoqry_dict, mongofields_dict,
                    skip=limitby_skip, limit=limitby_limit,
                    sort=mongosort_list, snapshot=snapshot).count()}
        else:
            # pymongo cursor object
            mongo_list_dicts = ctable.find(mongoqry_dict,
                                mongofields_dict, skip=limitby_skip,
                                limit=limitby_limit, sort=mongosort_list,
                                snapshot=snapshot)
            rows = []
            # populate row in proper order
            # Here we replace ._id with .id to follow the standard naming
            colnames = []
            newnames = []
            for field in fields:
                colname = str(field)
                colnames.append(colname)
                tablename, fieldname = colname.split(".")
                if fieldname == "_id":
                    # Mongodb reserved uuid key; note this mutates the
                    # Field object's name in place
                    field.name = "id"
                newnames.append(".".join((tablename, field.name)))

            for record in mongo_list_dicts:
                row=[]
                for colname in colnames:
                    tablename, fieldname = colname.split(".")
                    # switch to Mongo _id uuids for retrieving
                    # record id's
                    if fieldname == "id": fieldname = "_id"
                    if fieldname in record:
                        if isinstance(record[fieldname],
                                      self.ObjectId):
                            # ObjectId values come back as integers
                            value = int(str(record[fieldname]), 16)
                        else:
                            value = record[fieldname]
                    else:
                        value = None
                    row.append(value)
                rows.append(row)
            processor = attributes.get('processor', self.parse)
            result = processor(rows, fields, newnames, False)
            return result
5401 5402
5403 - def INVERT(self, first):
5404 #print "in invert first=%s" % first 5405 return '-%s' % self.expand(first)
5406
5407 - def drop(self, table, mode=''):
5408 ctable = self.connection[table._tablename] 5409 ctable.drop()
5410 5411
5412 - def truncate(self, table, mode, safe=None):
5413 if safe == None: 5414 safe=self.safe 5415 ctable = self.connection[table._tablename] 5416 ctable.remove(None, safe=True)
5417
5418 - def oupdate(self, tablename, query, fields):
5419 if not isinstance(query, Query): 5420 raise SyntaxError("Not Supported") 5421 filter = None 5422 if query: 5423 filter = self.expand(query) 5424 modify = {'$set': dict((k.name, self.represent(v, k.type)) for 5425 k, v in fields)} 5426 return modify, filter
5427
    def update(self, tablename, query, fields, safe=None):
        """Update all documents matching `query`.

        Returns the driver-reported row count when available, otherwise
        the amount counted before the update.
        """
        if safe == None:
            safe = self.safe
        # return amount of adjusted rows or zero, but no exceptions
        # @ related not finding the result
        if not isinstance(query, Query):
            raise RuntimeError("Not implemented")
        # count the matches up-front as a fallback result
        amount = self.count(query, False)
        modify, filter = self.oupdate(tablename, query, fields)
        try:
            result = self.connection[tablename].update(filter,
                       modify, multi=True, safe=safe)
            if safe:
                try:
                    # if result count is available fetch it
                    return result["n"]
                except (KeyError, AttributeError, TypeError):
                    return amount
            else:
                return amount
        except Exception, e:
            # TODO Reverse update query to verify that the query succeeded
            raise RuntimeError("uncaught exception when updating rows: %s" % e)
5451 5452 #this function returns a dict with the where clause and update fields
5453 - def _update(self,tablename,query,fields):
5454 return str(self.oupdate(tablename, query, fields))
5455
5456 - def delete(self, tablename, query, safe=None):
5457 if safe is None: 5458 safe = self.safe 5459 amount = 0 5460 amount = self.count(query, False) 5461 if not isinstance(query, Query): 5462 raise RuntimeError("query type %s is not supported" % \ 5463 type(query)) 5464 filter = self.expand(query) 5465 self._delete(tablename, filter, safe=safe) 5466 return amount
5467
5468 - def _delete(self, tablename, filter, safe=None):
5469 return self.connection[tablename].remove(filter, safe=safe)
5470
5471 - def bulk_insert(self, table, items):
5472 return [self.insert(table,item) for item in items]
5473 5474 # TODO This will probably not work:(
5475 - def NOT(self, first):
5476 result = {} 5477 result["$not"] = self.expand(first) 5478 return result
5479
5480 - def AND(self,first,second):
5481 f = self.expand(first) 5482 s = self.expand(second) 5483 f.update(s) 5484 return f
5485
5486 - def OR(self,first,second):
5487 # pymongo expects: .find({'$or': [{'name':'1'}, {'name':'2'}]}) 5488 result = {} 5489 f = self.expand(first) 5490 s = self.expand(second) 5491 result['$or'] = [f,s] 5492 return result
5493
5494 - def BELONGS(self, first, second):
5495 if isinstance(second, str): 5496 return {self.expand(first) : {"$in" : [ second[:-1]]} } 5497 elif second==[] or second==(): 5498 return {1:0} 5499 items = [self.expand(item, first.type) for item in second] 5500 return {self.expand(first) : {"$in" : items} }
5501
5502 - def EQ(self,first,second):
5503 result = {} 5504 result[self.expand(first)] = self.expand(second) 5505 return result
5506
5507 - def NE(self, first, second=None):
5508 result = {} 5509 result[self.expand(first)] = {'$ne': self.expand(second)} 5510 return result
5511
5512 - def LT(self,first,second=None):
5513 if second is None: 5514 raise RuntimeError("Cannot compare %s < None" % first) 5515 result = {} 5516 result[self.expand(first)] = {'$lt': self.expand(second)} 5517 return result
5518
5519 - def LE(self,first,second=None):
5520 if second is None: 5521 raise RuntimeError("Cannot compare %s <= None" % first) 5522 result = {} 5523 result[self.expand(first)] = {'$lte': self.expand(second)} 5524 return result
5525
5526 - def GT(self,first,second):
5527 result = {} 5528 result[self.expand(first)] = {'$gt': self.expand(second)} 5529 return result
5530
5531 - def GE(self,first,second=None):
5532 if second is None: 5533 raise RuntimeError("Cannot compare %s >= None" % first) 5534 result = {} 5535 result[self.expand(first)] = {'$gte': self.expand(second)} 5536 return result
5537
5538 - def ADD(self, first, second):
5539 raise NotImplementedError(self.error_messages["javascript_needed"]) 5540 return '%s + %s' % (self.expand(first), 5541 self.expand(second, first.type))
5542
5543 - def SUB(self, first, second):
5544 raise NotImplementedError(self.error_messages["javascript_needed"]) 5545 return '(%s - %s)' % (self.expand(first), 5546 self.expand(second, first.type))
5547
5548 - def MUL(self, first, second):
5549 raise NotImplementedError(self.error_messages["javascript_needed"]) 5550 return '(%s * %s)' % (self.expand(first), 5551 self.expand(second, first.type))
5552
5553 - def DIV(self, first, second):
5554 raise NotImplementedError(self.error_messages["javascript_needed"]) 5555 return '(%s / %s)' % (self.expand(first), 5556 self.expand(second, first.type))
5557
5558 - def MOD(self, first, second):
5559 raise NotImplementedError(self.error_messages["javascript_needed"]) 5560 return '(%s %% %s)' % (self.expand(first), 5561 self.expand(second, first.type))
5562
5563 - def AS(self, first, second):
5564 raise NotImplementedError(self.error_messages["javascript_needed"]) 5565 return '%s AS %s' % (self.expand(first), second)
5566 5567 # We could implement an option that simulates a full featured SQL 5568 # database. But I think the option should be set explicit or 5569 # implemented as another library.
5570 - def ON(self, first, second):
5571 raise NotImplementedError("This is not possible in NoSQL" + 5572 " but can be simulated with a wrapper.") 5573 return '%s ON %s' % (self.expand(first), self.expand(second))
5574 5575 # BLOW ARE TWO IMPLEMENTATIONS OF THE SAME FUNCITONS 5576 # WHICH ONE IS BEST? 5577
5578 - def COMMA(self, first, second):
5579 return '%s, %s' % (self.expand(first), self.expand(second))
5580
    def LIKE(self, first, second):
        # NOTE(review): shadowed by the later LIKE redefinition in this
        # class — this version is dead code.
        #escaping regex operators?
        return {self.expand(first): ('%s' % \
            self.expand(second, 'string').replace('%','/'))}
5585
    def STARTSWITH(self, first, second):
        # NOTE(review): shadowed by the later STARTSWITH redefinition in
        # this class — this version is dead code.
        #escaping regex operators?
        return {self.expand(first): ('/^%s/' % \
            self.expand(second, 'string'))}
5590
    def ENDSWITH(self, first, second):
        # NOTE(review): shadowed by the later ENDSWITH redefinition in
        # this class — dead code. The '/%s^/' pattern also looks wrong:
        # '^' anchors the start of a regex, '$' anchors the end.
        #escaping regex operators?
        return {self.expand(first): ('/%s^/' % \
            self.expand(second, 'string'))}
5595
    def CONTAINS(self, first, second, case_sensitive=False):
        # NOTE(review): shadowed by the later CONTAINS redefinition in
        # this class — this version is dead code.
        # silently ignore, only case sensitive
        # There is a technical difference, but mongodb doesn't support
        # that, but the result will be the same
        return {self.expand(first) : ('/%s/' % \
            self.expand(second, 'string'))}
5602
5603 - def LIKE(self, first, second):
5604 import re 5605 return {self.expand(first): {'$regex': \ 5606 re.escape(self.expand(second, 5607 'string')).replace('%','.*')}}
5608 5609 #TODO verify full compatibilty with official SQL Like operator
5610 - def STARTSWITH(self, first, second):
5611 #TODO Solve almost the same problem as with endswith 5612 import re 5613 return {self.expand(first): {'$regex' : '^' + 5614 re.escape(self.expand(second, 5615 'string'))}}
5616 5617 #TODO verify full compatibilty with official SQL Like operator
5618 - def ENDSWITH(self, first, second):
5619 #escaping regex operators? 5620 #TODO if searched for a name like zsa_corbitt and the function 5621 # is endswith('a') then this is also returned. 5622 # Aldo it end with a t 5623 import re 5624 return {self.expand(first): {'$regex': \ 5625 re.escape(self.expand(second, 'string')) + '$'}}
5626 5627 #TODO verify full compatibilty with official oracle contains operator
5628 - def CONTAINS(self, first, second, case_sensitive=False):
5629 # silently ignore, only case sensitive 5630 #There is a technical difference, but mongodb doesn't support 5631 # that, but the result will be the same 5632 #TODO contains operators need to be transformed to Regex 5633 return {self.expand(first) : {' $regex': \ 5634 ".*" + re.escape(self.expand(second, 'string')) + ".*"}}
5635
class IMAPAdapter(NoSQLAdapter):
    """IMAP server adapter

    This class is intended as an interface with email IMAP servers to
    perform simple queries in the web2py DAL query syntax, so email
    read, search and other related IMAP mail services (as those
    implemented by brands like Google(r) and Yahoo!(r)) can be managed
    from web2py applications.

    The code uses examples by Yuji Tomita on this post:
    http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
    and is based on docs for Python imaplib, python email
    and email IETF's (i.e. RFC2060 and RFC3501)

    This adapter was tested with a small set of operations with Gmail(r).
    Other services' requests could raise command syntax and response
    data issues.

    It creates its table and field names "statically", meaning that the
    developer should leave the table and field definitions to the DAL
    instance by calling the adapter's .define_tables() method. The
    tables are defined with the IMAP server mailbox list information.

    .define_tables() returns a dictionary mapping dal tablenames
    to the server mailbox names with the following structure:

    {<tablename>: str <server mailbox name>}

    Here is a list of supported fields:

    Field       Type         Description
    ################################################################
    uid         string
    answered    boolean      Flag
    created     date
    content     list:string  A list of text or html parts
    to          string
    cc          string
    bcc         string
    size        integer      the amount of octets of the message*
    deleted     boolean      Flag
    draft       boolean      Flag
    flagged     boolean      Flag
    sender      string
    recent      boolean      Flag
    seen        boolean      Flag
    subject     string
    mime        string       The mime header declaration
    email       string       The complete RFC822 message
    attachments <type list>  Each non text part as dict
    encoding    string       The main detected encoding

    *At the application side it is measured as the length of the RFC822
    message string

    WARNING: As row id's are mapped to email sequence numbers,
    make sure your imap client web2py app does not delete messages
    during select or update actions, to prevent updating or deleting
    different messages. Sequence numbers change whenever the mailbox is
    updated. To avoid these sequence number issues, it is recommended
    to use uid fields in query references (although the update and
    delete in separate actions rule still applies).

    # This is the code recommended to start imap support
    # at the app's model:

    imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
    imapdb.define_tables()

    Here is an (incomplete) list of possible imap commands:

    # Count today's unseen messages
    # smaller than 6000 octets from the
    # inbox mailbox

    q = imapdb.INBOX.seen == False
    q &= imapdb.INBOX.created == datetime.date.today()
    q &= imapdb.INBOX.size < 6000
    unread = imapdb(q).count()

    # Fetch last query messages
    rows = imapdb(q).select()

    # it is also possible to filter query select results with limitby and
    # sequences of mailbox fields

    set.select(<fields sequence>, limitby=(<int>, <int>))

    # Mark last query messages as seen
    messages = [row.uid for row in rows]
    seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)

    # Delete messages in the imap database that have mails from mr. Gumby

    deleted = 0
    for mailbox in imapdb.tables:
        deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()

    # It is also possible to mark messages for deletion instead of erasing
    # them directly with set.update(deleted=True)

    # This object gives access
    # to the adapter auto mailbox
    # mapped names (which native
    # mailbox has what table name)

    imapdb.mailboxes <dict> # tablename, server native name pairs

    # To retrieve a table native mailbox name use:
    imapdb.<table>.mailbox

    ### New features v2.4.1:

    # Declare mailboxes statically with tablename, name pairs
    # This avoids the extra server names retrieval

    imapdb.define_tables({"inbox": "INBOX"})

    # Selects without content/attachments/email columns will only
    # fetch header and flags

    imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
    """
    # NOTE: the big description above used to sit AFTER ``drivers`` as a
    # bare string statement; placed first it now becomes the real
    # class docstring (``IMAPAdapter.__doc__``).

    drivers = ('imaplib',)

    # DAL type -> Python type mapping used when defining mailbox tables
    types = {
                'string': str,
                'text': str,
                'date': datetime.date,
                'datetime': datetime.datetime,
                'id': long,
                'boolean': bool,
                'integer': int,
                'bigint': long,
                'blob': str,
                'list:string': str,
        }

    dbengine = 'imap'

    # user[:password]@host[:port] — raw string avoids invalid-escape issues
    REGEX_URI = re.compile(r'^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
    def __init__(self,
                 db,
                 uri,
                 pool_size=0,
                 folder=None,
                 db_codec ='UTF-8',
                 credential_decoder=IDENTITY,
                 driver_args={},
                 adapter_args={},
                 do_connect=True,
                 after_connection=None):
        """Parse the imap:// uri, prepare search-field mappings and
        build the connector used for (pooled) connections.

        db uri: user@example.com:password@imap.server.com:123
        """
        # TODO: max size adapter argument for preventing large mail transfers

        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size=pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.credential_decoder = credential_decoder
        self.driver_args = driver_args
        self.adapter_args = adapter_args
        # filled on select(); number of messages in the selected mailbox
        self.mailbox_size = None
        # optional static tablename -> mailbox map (see define_tables)
        self.static_names = None
        self.charset = sys.getfilesystemencoding()
        # imap class (IMAP4 or IMAP4_SSL), chosen at connect time
        self.imap4 = None
        uri = uri.split("://")[1]

        """ MESSAGE is an identifier for sequence number"""

        self.flags = ['\\Deleted', '\\Draft', '\\Flagged',
                      '\\Recent', '\\Seen', '\\Answered']
        # DAL field name -> IMAP search key (None: not searchable)
        self.search_fields = {
            'id': 'MESSAGE', 'created': 'DATE',
            'uid': 'UID', 'sender': 'FROM',
            'to': 'TO', 'cc': 'CC',
            'bcc': 'BCC', 'content': 'TEXT',
            'size': 'SIZE', 'deleted': '\\Deleted',
            'draft': '\\Draft', 'flagged': '\\Flagged',
            'recent': '\\Recent', 'seen': '\\Seen',
            'subject': 'SUBJECT', 'answered': '\\Answered',
            'mime': None, 'email': None,
            'attachments': None
            }

        db['_lastsql'] = ''

        m = self.REGEX_URI.match(uri)
        user = m.group('user')
        password = m.group('password')
        host = m.group('host')
        port = int(m.group('port'))
        # port 993 conventionally means IMAP over SSL
        over_ssl = False
        if port==993:
            over_ssl = True

        driver_args.update(host=host,port=port, password=password, user=user)
        def connector(driver_args=driver_args):
            # it is assumed sucessful authentication alLways
            # TODO: support direct connection and login tests
            if over_ssl:
                self.imap4 = self.driver.IMAP4_SSL
            else:
                self.imap4 = self.driver.IMAP4
            connection = self.imap4(driver_args["host"], driver_args["port"])
            data = connection.login(driver_args["user"], driver_args["password"])

            # static mailbox list
            connection.mailbox_names = None

            # dummy cursor function
            connection.cursor = lambda : True

            return connection

        self.db.define_tables = self.define_tables
        self.connector = connector
        if do_connect: self.reconnect()
5865
    def reconnect(self, f=None, cursor=True):
        """
        IMAP4 Pool connection method

        The imap connection lacks a self cursor command.
        A custom command should be provided as a replacement
        for connection pooling to prevent uncaught remote session
        closing

        """
        # already connected: nothing to do
        if getattr(self,'connection',None) != None:
            return
        if f is None:
            f = self.connector

        if not self.pool_size:
            # pooling disabled: connect directly
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            POOLS = ConnectionPool.POOLS
            uri = self.uri
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    # reuse a pooled connection if one is available
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    if self.cursor and self.check_active_connection:
                        try:
                            # check if connection is alive or close it
                            result, data = self.connection.list()
                        except:
                            # Possible connection reset error
                            # TODO: read exception class
                            self.connection = f()
                    break
                else:
                    # pool empty: create a fresh connection
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
5910
5911 - def get_last_message(self, tablename):
5912 last_message = None 5913 # request mailbox list to the server 5914 # if needed 5915 if not isinstance(self.connection.mailbox_names, dict): 5916 self.get_mailboxes() 5917 try: 5918 result = self.connection.select(self.connection.mailbox_names[tablename]) 5919 last_message = int(result[1][0]) 5920 except (IndexError, ValueError, TypeError, KeyError): 5921 e = sys.exc_info()[1] 5922 LOGGER.debug("Error retrieving the last mailbox sequence number. %s" % str(e)) 5923 return last_message
5924
5925 - def get_uid_bounds(self, tablename):
5926 if not isinstance(self.connection.mailbox_names, dict): 5927 self.get_mailboxes() 5928 # fetch first and last messages 5929 # return (first, last) messages uid's 5930 last_message = self.get_last_message(tablename) 5931 result, data = self.connection.uid("search", None, "(ALL)") 5932 uid_list = data[0].strip().split() 5933 if len(uid_list) <= 0: 5934 return None 5935 else: 5936 return (uid_list[0], uid_list[-1])
5937
5938 - def convert_date(self, date, add=None):
5939 if add is None: 5940 add = datetime.timedelta() 5941 """ Convert a date object to a string 5942 with d-Mon-Y style for IMAP or the inverse 5943 case 5944 5945 add <timedelta> adds to the date object 5946 """ 5947 months = [None, "Jan","Feb","Mar","Apr","May","Jun", 5948 "Jul", "Aug","Sep","Oct","Nov","Dec"] 5949 if isinstance(date, basestring): 5950 # Prevent unexpected date response format 5951 try: 5952 dayname, datestring = date.split(",") 5953 except (ValueError): 5954 LOGGER.debug("Could not parse date text: %s" % date) 5955 return None 5956 date_list = datestring.strip().split() 5957 year = int(date_list[2]) 5958 month = months.index(date_list[1]) 5959 day = int(date_list[0]) 5960 hms = map(int, date_list[3].split(":")) 5961 return datetime.datetime(year, month, day, 5962 hms[0], hms[1], hms[2]) + add 5963 elif isinstance(date, (datetime.datetime, datetime.date)): 5964 return (date + add).strftime("%d-%b-%Y") 5965 5966 else: 5967 return None
5968 5969 @staticmethod
5970 - def header_represent(f, r):
5971 from email.header import decode_header 5972 text, encoding = decode_header(f)[0] 5973 return text
5974
5975 - def encode_text(self, text, charset, errors="replace"):
5976 """ convert text for mail to unicode""" 5977 if text is None: 5978 text = "" 5979 else: 5980 if isinstance(text, str): 5981 if charset is None: 5982 text = unicode(text, "utf-8", errors) 5983 else: 5984 text = unicode(text, charset, errors) 5985 else: 5986 raise Exception("Unsupported mail text type %s" % type(text)) 5987 return text.encode("utf-8")
5988
5989 - def get_charset(self, message):
5990 charset = message.get_content_charset() 5991 return charset
5992
5993 - def get_mailboxes(self):
5994 """ Query the mail database for mailbox names """ 5995 if self.static_names: 5996 # statically defined mailbox names 5997 self.connection.mailbox_names = self.static_names 5998 return self.static_names.keys() 5999 6000 mailboxes_list = self.connection.list() 6001 self.connection.mailbox_names = dict() 6002 mailboxes = list() 6003 x = 0 6004 for item in mailboxes_list[1]: 6005 x = x + 1 6006 item = item.strip() 6007 if not "NOSELECT" in item.upper(): 6008 sub_items = item.split("\"") 6009 sub_items = [sub_item for sub_item in sub_items \ 6010 if len(sub_item.strip()) > 0] 6011 # mailbox = sub_items[len(sub_items) -1] 6012 mailbox = sub_items[-1] 6013 # remove unwanted characters and store original names 6014 # Don't allow leading non alphabetic characters 6015 mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox))) 6016 mailboxes.append(mailbox_name) 6017 self.connection.mailbox_names[mailbox_name] = mailbox 6018 6019 return mailboxes
6020
6021 - def get_query_mailbox(self, query):
6022 nofield = True 6023 tablename = None 6024 attr = query 6025 while nofield: 6026 if hasattr(attr, "first"): 6027 attr = attr.first 6028 if isinstance(attr, Field): 6029 return attr.tablename 6030 elif isinstance(attr, Query): 6031 pass 6032 else: 6033 return None 6034 else: 6035 return None 6036 return tablename
6037
6038 - def is_flag(self, flag):
6039 if self.search_fields.get(flag, None) in self.flags: 6040 return True 6041 else: 6042 return False
6043
    def define_tables(self, mailbox_names=None):
        """
        Auto create common IMAP fields

        This function creates field definitions "statically",
        meaning that custom fields as in other adapters should
        not be supported and definitions handled on a service/mode
        basis (local syntax for Gmail(r), Ymail(r))

        Returns a dictionary with tablename, server native mailbox name
        pairs.
        """
        if mailbox_names:
            # optional statically declared mailboxes
            self.static_names = mailbox_names
        else:
            self.static_names = None
        # fetch mailbox names from the server if not yet cached
        if not isinstance(self.connection.mailbox_names, dict):
            self.get_mailboxes()

        names = self.connection.mailbox_names.keys()

        for name in names:
            self.db.define_table("%s" % name,
                Field("uid", "string", writable=False),
                Field("answered", "boolean"),
                Field("created", "datetime", writable=False),
                Field("content", "list:string", writable=False),
                Field("to", "string", writable=False),
                Field("cc", "string", writable=False),
                Field("bcc", "string", writable=False),
                Field("size", "integer", writable=False),
                Field("deleted", "boolean"),
                Field("draft", "boolean"),
                Field("flagged", "boolean"),
                Field("sender", "string", writable=False),
                Field("recent", "boolean", writable=False),
                Field("seen", "boolean"),
                Field("subject", "string", writable=False),
                Field("mime", "string", writable=False),
                Field("email", "string", writable=False, readable=False),
                Field("attachments", list, writable=False, readable=False),
                Field("encoding")
                )

            # Set a special _mailbox attribute for storing
            # native mailbox names
            self.db[name].mailbox = \
                self.connection.mailbox_names[name]

            # decode quoted printable
            self.db[name].to.represent = self.db[name].cc.represent = \
                self.db[name].bcc.represent = self.db[name].sender.represent = \
                self.db[name].subject.represent = self.header_represent

        # Set the db instance mailbox collections
        self.db.mailboxes = self.connection.mailbox_names
        return self.db.mailboxes
6102
6103 - def create_table(self, *args, **kwargs):
6104 # not implemented 6105 # but required by DAL 6106 pass
6107
6108 - def _select(self, query, fields, attributes):
6109 if use_common_filters(query): 6110 query = self.common_filter(query, [self.get_query_mailbox(query),]) 6111 return str(query)
6112
    def select(self, query, fields, attributes):
        """Search and fetch records and return web2py rows.

        Runs a UID search for *query* on the mapped mailbox, fetches
        each matching message (headers only unless content/size/
        attachments/email columns are requested), maps the parts onto
        the static column set and feeds the result to the row processor.
        """
        # move this statement elsewhere (upper-level)
        if use_common_filters(query):
            query = self.common_filter(query, [self.get_query_mailbox(query),])

        import email
        # get records from imap server with search + fetch
        # convert results to a dictionary
        tablename = None
        fetch_results = list()

        if isinstance(query, Query):
            tablename = self.get_table(query)
            mailbox = self.connection.mailbox_names.get(tablename, None)
            if mailbox is None:
                raise ValueError("Mailbox name not found: %s" % mailbox)
            else:
                # select with readonly
                result, selected = self.connection.select(mailbox, True)
                if result != "OK":
                    raise Exception("IMAP error: %s" % selected)
                self.mailbox_size = int(selected[0])
                search_query = "(%s)" % str(query).strip()
                search_result = self.connection.uid("search", None, search_query)
                # Normal IMAP response OK is assumed (change this)
                if search_result[0] == "OK":
                    # For "light" remote server responses just get the first
                    # ten records (change for non-experimental implementation)
                    # However, light responses are not guaranteed with this
                    # approach, just fewer messages.
                    limitby = attributes.get('limitby', None)
                    messages_set = search_result[1][0].split()
                    # descending order
                    messages_set.reverse()
                    if limitby is not None:
                        # TODO: orderby, asc/desc, limitby from complete message set
                        messages_set = messages_set[int(limitby[0]):int(limitby[1])]

                    # keep the requests small for header/flags
                    if any([(field.name in ["content", "size",
                                            "attachments", "email"]) for
                           field in fields]):
                        imap_fields = "(RFC822 FLAGS)"
                    else:
                        imap_fields = "(RFC822.HEADER FLAGS)"

                    if len(messages_set) > 0:
                        # create fetch results object list
                        # fetch each remote message and store it in memmory
                        # (change to multi-fetch command syntax for faster
                        # transactions)
                        for uid in messages_set:
                            # fetch the RFC822 message body
                            typ, data = self.connection.uid("fetch", uid, imap_fields)
                            if typ == "OK":
                                fr = {"message": int(data[0][0].split()[0]),
                                      "uid": int(uid),
                                      "email": email.message_from_string(data[0][1]),
                                      "raw_message": data[0][1]}
                                fr["multipart"] = fr["email"].is_multipart()
                                # fetch flags for the message
                                fr["flags"] = self.driver.ParseFlags(data[1])
                                fetch_results.append(fr)
                            else:
                                # error retrieving the message body
                                raise Exception("IMAP error retrieving the body: %s" % data)
                else:
                    raise Exception("IMAP search error: %s" % search_result[1])
        elif isinstance(query, (Expression, basestring)):
            raise NotImplementedError()
        else:
            raise TypeError("Unexpected query type")

        imapqry_dict = {}
        imapfields_dict = {}

        # SQLALL or an empty field list means: return every known column
        if len(fields) == 1 and isinstance(fields[0], SQLALL):
            allfields = True
        elif len(fields) == 0:
            allfields = True
        else:
            allfields = False
        if allfields:
            colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
        else:
            colnames = ["%s.%s" % (tablename, field.name) for field in fields]

        for k in colnames:
            imapfields_dict[k] = k

        imapqry_list = list()
        imapqry_array = list()
        for fr in fetch_results:
            attachments = []
            content = []
            size = 0
            n = int(fr["message"])
            item_dict = dict()
            message = fr["email"]
            uid = fr["uid"]
            charset = self.get_charset(message)
            flags = fr["flags"]
            raw_message = fr["raw_message"]
            # Return messages data mapping static fields
            # and fetched results. Mapping should be made
            # outside the select function (with auxiliary
            # instance methods)

            # pending: search flags states trough the email message
            # instances for correct output

            # preserve subject encoding (ASCII/quoted printable)

            if "%s.id" % tablename in colnames:
                item_dict["%s.id" % tablename] = n
            if "%s.created" % tablename in colnames:
                item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
            if "%s.uid" % tablename in colnames:
                item_dict["%s.uid" % tablename] = uid
            if "%s.sender" % tablename in colnames:
                # If there is no encoding found in the message header
                # force utf-8 replacing characters (change this to
                # module's defaults). Applies to .sender, .to, .cc and .bcc fields
                item_dict["%s.sender" % tablename] = message["From"]
            if "%s.to" % tablename in colnames:
                item_dict["%s.to" % tablename] = message["To"]
            if "%s.cc" % tablename in colnames:
                if "Cc" in message.keys():
                    item_dict["%s.cc" % tablename] = message["Cc"]
                else:
                    item_dict["%s.cc" % tablename] = ""
            if "%s.bcc" % tablename in colnames:
                if "Bcc" in message.keys():
                    item_dict["%s.bcc" % tablename] = message["Bcc"]
                else:
                    item_dict["%s.bcc" % tablename] = ""
            if "%s.deleted" % tablename in colnames:
                item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
            if "%s.draft" % tablename in colnames:
                item_dict["%s.draft" % tablename] = "\\Draft" in flags
            if "%s.flagged" % tablename in colnames:
                item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
            if "%s.recent" % tablename in colnames:
                item_dict["%s.recent" % tablename] = "\\Recent" in flags
            if "%s.seen" % tablename in colnames:
                item_dict["%s.seen" % tablename] = "\\Seen" in flags
            if "%s.subject" % tablename in colnames:
                item_dict["%s.subject" % tablename] = message["Subject"]
            if "%s.answered" % tablename in colnames:
                item_dict["%s.answered" % tablename] = "\\Answered" in flags
            if "%s.mime" % tablename in colnames:
                item_dict["%s.mime" % tablename] = message.get_content_type()
            if "%s.encoding" % tablename in colnames:
                item_dict["%s.encoding" % tablename] = charset

            # Here goes the whole RFC822 body as an email instance
            # for controller side custom processing
            # The message is stored as a raw string
            # >> email.message_from_string(raw string)
            # returns a Message object for enhanced object processing
            if "%s.email" % tablename in colnames:
                # WARNING: no encoding performed (raw message)
                item_dict["%s.email" % tablename] = raw_message

            # Size measure as suggested in a Velocity Reviews post
            # by Tim Williams: "how to get size of email attachment"
            # Note: len() and server RFC822.SIZE reports doesn't match
            # To retrieve the server size for representation would add a new
            # fetch transaction to the process
            for part in message.walk():
                maintype = part.get_content_maintype()
                if ("%s.attachments" % tablename in colnames) or \
                        ("%s.content" % tablename in colnames):
                    if "%s.attachments" % tablename in colnames:
                        if not ("text" in maintype):
                            payload = part.get_payload(decode=True)
                            if payload:
                                attachment = {
                                    "payload": payload,
                                    "filename": part.get_filename(),
                                    "encoding": part.get_content_charset(),
                                    "mime": part.get_content_type(),
                                    "disposition": part["Content-Disposition"]}
                                attachments.append(attachment)
                    if "%s.content" % tablename in colnames:
                        payload = part.get_payload(decode=True)
                        part_charset = self.get_charset(part)
                        if "text" in maintype:
                            if payload:
                                content.append(self.encode_text(payload, part_charset))
                if "%s.size" % tablename in colnames:
                    if part is not None:
                        size += len(str(part))
            item_dict["%s.content" % tablename] = bar_encode(content)
            item_dict["%s.attachments" % tablename] = attachments
            item_dict["%s.size" % tablename] = size
            imapqry_list.append(item_dict)

        # extra object mapping for the sake of rows object
        # creation (sends an array or lists)
        for item_dict in imapqry_list:
            imapqry_array_item = list()
            for fieldname in colnames:
                imapqry_array_item.append(item_dict[fieldname])
            imapqry_array.append(imapqry_array_item)

        # parse result and return a rows object
        colnames = colnames
        processor = attributes.get('processor',self.parse)
        return processor(imapqry_array, fields, colnames)
6325
    def _update(self, tablename, query, fields, commit=False):
        """Build (but do not send) the IMAP STORE commands that update()
        would execute: a list of (msg_number, +FLAGS/-FLAGS, flag_set)
        tuples derived from the boolean flag fields in *fields*."""
        # TODO: the adapter should implement an .expand method
        commands = list()
        if use_common_filters(query):
            query = self.common_filter(query, [tablename,])
        mark = []
        unmark = []
        if query:
            # split requested flag values into flags to set and to clear;
            # \Recent is read-only and intentionally skipped
            for item in fields:
                field = item[0]
                name = field.name
                value = item[1]
                if self.is_flag(name):
                    flag = self.search_fields[name]
                    if (value is not None) and (flag != "\\Recent"):
                        if value:
                            mark.append(flag)
                        else:
                            unmark.append(flag)
            result, data = self.connection.select(
                self.connection.mailbox_names[tablename])
            string_query = "(%s)" % query
            result, data = self.connection.search(None, string_query)
            store_list = [item.strip() for item in data[0].split()
                          if item.strip().isdigit()]
            # build commands for marked flags
            for number in store_list:
                result = None
                if len(mark) > 0:
                    commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
                if len(unmark) > 0:
                    commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
        return commands
6359
6360 - def update(self, tablename, query, fields):
6361 rowcount = 0 6362 commands = self._update(tablename, query, fields) 6363 for command in commands: 6364 result, data = self.connection.store(*command) 6365 if result == "OK": 6366 rowcount += 1 6367 else: 6368 raise Exception("IMAP storing error: %s" % data) 6369 return rowcount
6370
6371 - def _count(self, query, distinct=None):
6372 raise NotImplementedError()
6373
6374 - def count(self,query,distinct=None):
6375 counter = 0 6376 tablename = self.get_query_mailbox(query) 6377 if query and tablename is not None: 6378 if use_common_filters(query): 6379 query = self.common_filter(query, [tablename,]) 6380 result, data = self.connection.select(self.connection.mailbox_names[tablename]) 6381 string_query = "(%s)" % query 6382 result, data = self.connection.search(None, string_query) 6383 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()] 6384 counter = len(store_list) 6385 return counter
6386
6387 - def delete(self, tablename, query):
6388 counter = 0 6389 if query: 6390 if use_common_filters(query): 6391 query = self.common_filter(query, [tablename,]) 6392 result, data = self.connection.select(self.connection.mailbox_names[tablename]) 6393 string_query = "(%s)" % query 6394 result, data = self.connection.search(None, string_query) 6395 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()] 6396 for number in store_list: 6397 result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)") 6398 if result == "OK": 6399 counter += 1 6400 else: 6401 raise Exception("IMAP store error: %s" % data) 6402 if counter > 0: 6403 result, data = self.connection.expunge() 6404 return counter
6405
6406 - def BELONGS(self, first, second):
6407 result = None 6408 name = self.search_fields[first.name] 6409 if name == "MESSAGE": 6410 values = [str(val) for val in second if str(val).isdigit()] 6411 result = "%s" % ",".join(values).strip() 6412 6413 elif name == "UID": 6414 values = [str(val) for val in second if str(val).isdigit()] 6415 result = "UID %s" % ",".join(values).strip() 6416 6417 else: 6418 raise Exception("Operation not supported") 6419 # result = "(%s %s)" % (self.expand(first), self.expand(second)) 6420 return result
6421
6422 - def CONTAINS(self, first, second, case_sensitive=False):
6423 # silently ignore, only case sensitive 6424 result = None 6425 name = self.search_fields[first.name] 6426 6427 if name in ("FROM", "TO", "SUBJECT", "TEXT"): 6428 result = "%s \"%s\"" % (name, self.expand(second)) 6429 else: 6430 if first.name in ("cc", "bcc"): 6431 result = "%s \"%s\"" % (first.name.upper(), self.expand(second)) 6432 elif first.name == "mime": 6433 result = "HEADER Content-Type \"%s\"" % self.expand(second) 6434 else: 6435 raise Exception("Operation not supported") 6436 return result
6437
6438 - def GT(self, first, second):
6439 result = None 6440 name = self.search_fields[first.name] 6441 if name == "MESSAGE": 6442 last_message = self.get_last_message(first.tablename) 6443 result = "%d:%d" % (int(self.expand(second)) + 1, last_message) 6444 elif name == "UID": 6445 # GT and LT may not return 6446 # expected sets depending on 6447 # the uid format implemented 6448 try: 6449 pedestal, threshold = self.get_uid_bounds(first.tablename) 6450 except TypeError: 6451 e = sys.exc_info()[1] 6452 LOGGER.debug("Error requesting uid bounds: %s", str(e)) 6453 return "" 6454 try: 6455 lower_limit = int(self.expand(second)) + 1 6456 except (ValueError, TypeError): 6457 e = sys.exc_info()[1] 6458 raise Exception("Operation not supported (non integer UID)") 6459 result = "UID %s:%s" % (lower_limit, threshold) 6460 elif name == "DATE": 6461 result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1)) 6462 elif name == "SIZE": 6463 result = "LARGER %s" % self.expand(second) 6464 else: 6465 raise Exception("Operation not supported") 6466 return result
6467
6468 - def GE(self, first, second):
6469 result = None 6470 name = self.search_fields[first.name] 6471 if name == "MESSAGE": 6472 last_message = self.get_last_message(first.tablename) 6473 result = "%s:%s" % (self.expand(second), last_message) 6474 elif name == "UID": 6475 # GT and LT may not return 6476 # expected sets depending on 6477 # the uid format implemented 6478 try: 6479 pedestal, threshold = self.get_uid_bounds(first.tablename) 6480 except TypeError: 6481 e = sys.exc_info()[1] 6482 LOGGER.debug("Error requesting uid bounds: %s", str(e)) 6483 return "" 6484 lower_limit = self.expand(second) 6485 result = "UID %s:%s" % (lower_limit, threshold) 6486 elif name == "DATE": 6487 result = "SINCE %s" % self.convert_date(second) 6488 else: 6489 raise Exception("Operation not supported") 6490 return result
6491
6492 - def LT(self, first, second):
6493 result = None 6494 name = self.search_fields[first.name] 6495 if name == "MESSAGE": 6496 result = "%s:%s" % (1, int(self.expand(second)) - 1) 6497 elif name == "UID": 6498 try: 6499 pedestal, threshold = self.get_uid_bounds(first.tablename) 6500 except TypeError: 6501 e = sys.exc_info()[1] 6502 LOGGER.debug("Error requesting uid bounds: %s", str(e)) 6503 return "" 6504 try: 6505 upper_limit = int(self.expand(second)) - 1 6506 except (ValueError, TypeError): 6507 e = sys.exc_info()[1] 6508 raise Exception("Operation not supported (non integer UID)") 6509 result = "UID %s:%s" % (pedestal, upper_limit) 6510 elif name == "DATE": 6511 result = "BEFORE %s" % self.convert_date(second) 6512 elif name == "SIZE": 6513 result = "SMALLER %s" % self.expand(second) 6514 else: 6515 raise Exception("Operation not supported") 6516 return result
6517
6518 - def LE(self, first, second):
6519 result = None 6520 name = self.search_fields[first.name] 6521 if name == "MESSAGE": 6522 result = "%s:%s" % (1, self.expand(second)) 6523 elif name == "UID": 6524 try: 6525 pedestal, threshold = self.get_uid_bounds(first.tablename) 6526 except TypeError: 6527 e = sys.exc_info()[1] 6528 LOGGER.debug("Error requesting uid bounds: %s", str(e)) 6529 return "" 6530 upper_limit = int(self.expand(second)) 6531 result = "UID %s:%s" % (pedestal, upper_limit) 6532 elif name == "DATE": 6533 result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1)) 6534 else: 6535 raise Exception("Operation not supported") 6536 return result
6537
6538 - def NE(self, first, second=None):
6539 if (second is None) and isinstance(first, Field): 6540 # All records special table query 6541 if first.type == "id": 6542 return self.GE(first, 1) 6543 result = self.NOT(self.EQ(first, second)) 6544 result = result.replace("NOT NOT", "").strip() 6545 return result
6546
6547 - def EQ(self,first,second):
6548 name = self.search_fields[first.name] 6549 result = None 6550 if name is not None: 6551 if name == "MESSAGE": 6552 # query by message sequence number 6553 result = "%s" % self.expand(second) 6554 elif name == "UID": 6555 result = "UID %s" % self.expand(second) 6556 elif name == "DATE": 6557 result = "ON %s" % self.convert_date(second) 6558 6559 elif name in self.flags: 6560 if second: 6561 result = "%s" % (name.upper()[1:]) 6562 else: 6563 result = "NOT %s" % (name.upper()[1:]) 6564 else: 6565 raise Exception("Operation not supported") 6566 else: 6567 raise Exception("Operation not supported") 6568 return result
6569
6570 - def AND(self, first, second):
6571 result = "%s %s" % (self.expand(first), self.expand(second)) 6572 return result
6573
6574 - def OR(self, first, second):
6575 result = "OR %s %s" % (self.expand(first), self.expand(second)) 6576 return "%s" % result.replace("OR OR", "OR")
6577
6578 - def NOT(self, first):
6579 result = "NOT %s" % self.expand(first) 6580 return result
6581 6582 ######################################################################## 6583 # end of adapters 6584 ######################################################################## 6585 6586 ADAPTERS = { 6587 'sqlite': SQLiteAdapter, 6588 'spatialite': SpatiaLiteAdapter, 6589 'sqlite:memory': SQLiteAdapter, 6590 'spatialite:memory': SpatiaLiteAdapter, 6591 'mysql': MySQLAdapter, 6592 'postgres': PostgreSQLAdapter, 6593 'postgres:psycopg2': PostgreSQLAdapter, 6594 'postgres:pg8000': PostgreSQLAdapter, 6595 'postgres2:psycopg2': NewPostgreSQLAdapter, 6596 'postgres2:pg8000': NewPostgreSQLAdapter, 6597 'oracle': OracleAdapter, 6598 'mssql': MSSQLAdapter, 6599 'mssql2': MSSQL2Adapter, 6600 'mssql3': MSSQL3Adapter, 6601 'sybase': SybaseAdapter, 6602 'db2': DB2Adapter, 6603 'teradata': TeradataAdapter, 6604 'informix': InformixAdapter, 6605 'informix-se': InformixSEAdapter, 6606 'firebird': FireBirdAdapter, 6607 'firebird_embedded': FireBirdAdapter, 6608 'ingres': IngresAdapter, 6609 'ingresu': IngresUnicodeAdapter, 6610 'sapdb': SAPDBAdapter, 6611 'cubrid': CubridAdapter, 6612 'jdbc:sqlite': JDBCSQLiteAdapter, 6613 'jdbc:sqlite:memory': JDBCSQLiteAdapter, 6614 'jdbc:postgres': JDBCPostgreSQLAdapter, 6615 'gae': GoogleDatastoreAdapter, # discouraged, for backward compatibility 6616 'google:datastore': GoogleDatastoreAdapter, 6617 'google:sql': GoogleSQLAdapter, 6618 'couchdb': CouchDBAdapter, 6619 'mongodb': MongoDBAdapter, 6620 'imap': IMAPAdapter 6621 }
def sqlhtml_validators(field):
    """
    Field type validation, using web2py's validators mechanism.

    Makes sure the content of a field is in line with the declared
    fieldtype, returning a validator (or list of validators) suitable
    for assignment to ``field.requires``.  As a side effect, a default
    ``field.represent`` is installed for reference and list fields.
    """
    db = field.db
    # validators are optional; without them no validation is attached
    if not have_validators:
        return []
    field_type, field_length = field.type, field.length
    if isinstance(field_type, SQLCustomType):
        if hasattr(field_type, 'validator'):
            return field_type.validator
        else:
            field_type = field_type.type
    elif not isinstance(field_type, str):
        return []
    requires = []
    # helper: render a referenced record through the referenced table's
    # _format (string template or callable); falls back to the raw id
    def ff(r, id):
        row = r(id)
        if not row:
            return id
        elif hasattr(r, '_format') and isinstance(r._format, str):
            return r._format % row
        elif hasattr(r, '_format') and callable(r._format):
            return r._format(row)
        else:
            return id
    if field_type in (('string', 'text', 'password')):
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'json':
        requires.append(validators.IS_EMPTY_OR(validators.IS_JSON()))
    elif field_type == 'double' or field_type == 'float':
        requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
    elif field_type in ('integer', 'bigint'):
        # NOTE(review): float bounds (±1e100) passed to an int range
        # validator — presumably accepted by IS_INT_IN_RANGE; confirm.
        requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
    elif field_type.startswith('decimal'):
        requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
    elif field_type == 'date':
        requires.append(validators.IS_DATE())
    elif field_type == 'time':
        requires.append(validators.IS_TIME())
    elif field_type == 'datetime':
        requires.append(validators.IS_DATETIME())
    elif db and field_type.startswith('reference') and \
            field_type.find('.') < 0 and \
            field_type[10:] in db.tables:
        # 'reference <table>': validate against the referenced table
        referenced = db[field_type[10:]]
        def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
        field.represent = field.represent or repr_ref
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(db, referenced._id,
                                           referenced._format)
            if field.unique:
                requires._and = validators.IS_NOT_IN_DB(db, field)
            if field.tablename == field_type[10:]:
                # self-reference: allow empty to break the cycle
                return validators.IS_EMPTY_OR(requires)
            return requires
    elif db and field_type.startswith('list:reference') and \
            field_type.find('.') < 0 and \
            field_type[15:] in db.tables:
        # 'list:reference <table>': multi-valued reference
        referenced = db[field_type[15:]]
        def list_ref_repr(ids, row=None, r=referenced, f=ff):
            if not ids:
                return None
            refs = None
            db, id = r._db, r._id
            if isinstance(db._adapter, GoogleDatastoreAdapter):
                # GAE limits belongs() to 30 items per query; batch and merge
                def count(values): return db(id.belongs(values)).select(id)
                rx = range(0, len(ids), 30)
                refs = reduce(lambda a, b: a & b, [count(ids[i:i+30]) for i in rx])
            else:
                refs = db(id.belongs(ids)).select(id)
            return (refs and ', '.join(str(f(r, x.id)) for x in refs) or '')
        field.represent = field.represent or list_ref_repr
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(db, referenced._id,
                                           referenced._format, multiple=True)
        else:
            requires = validators.IS_IN_DB(db, referenced._id,
                                           multiple=True)
        if field.unique:
            requires._and = validators.IS_NOT_IN_DB(db, field)
        return requires
    elif field_type.startswith('list:'):
        def repr_list(values, row=None): return ', '.join(str(v) for v in (values or []))
        field.represent = field.represent or repr_list
    if field.unique:
        requires.insert(0, validators.IS_NOT_IN_DB(db, field))
    # sff: two-letter prefixes of types that may legitimately be empty
    # (integer, double, date, time, datetime, decimal, boolean, bigint)
    sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
    if field.notnull and not field_type[:2] in sff:
        requires.insert(0, validators.IS_NOT_EMPTY())
    elif not field.notnull and field_type[:2] in sff and requires:
        requires[-1] = validators.IS_EMPTY_OR(requires[-1])
    return requires
def bar_escape(item):
    """Escape literal '|' characters by doubling them, so the item can
    be stored inside a '|'-delimited list field."""
    text = str(item)
    return text.replace('|', '||')
6724
def bar_encode(items):
    """Serialize *items* into web2py's '|item1|item2|' list format,
    skipping blank items and escaping embedded bars via bar_escape."""
    kept = (bar_escape(item) for item in items if str(item).strip())
    return '|%s|' % '|'.join(kept)
6727
def bar_decode_integer(value):
    """Parse a '|1|2|3|' encoded string back into a list of ints.

    *value* may also be a file-like object (e.g. a database LOB),
    in which case it is read first.
    """
    if hasattr(value, 'read') and not hasattr(value, 'split'):
        value = value.read()
    chunks = value.split('|')
    return [int(chunk) for chunk in chunks if chunk.strip()]
6732
def bar_decode_string(value):
    """Parse a '|a|b|' encoded string back into a list of strings,
    un-escaping doubled bars."""
    inner = value[1:-1]  # drop the leading and trailing delimiter
    parts = REGEX_UNPACK.split(inner)
    return [part.replace('||', '|') for part in parts if part.strip()]
6736
class Row(object):

    """
    A dictionary that lets you do d['a'] as well as d.a;
    this is only used to store a DAL Row.
    """

    def __init__(self, *args, **kwargs):
        self.__dict__.update(*args, **kwargs)

    def __getitem__(self, key):
        # supports row['field'], row['table.field'], and values stashed
        # in the special '_extra' dict (e.g. results of expressions)
        key = str(key)
        m = REGEX_TABLE_DOT_FIELD.match(key)
        if key in self.get('_extra', {}):
            return self._extra[key]
        elif m:
            try:
                return ogetattr(self, m.group(1))[m.group(2)]
            except (KeyError, AttributeError, TypeError):
                key = m.group(2)
        return ogetattr(self, key)

    def __setitem__(self, key, value):
        setattr(self, str(key), value)

    __delitem__ = delattr

    # NOTE: a previous `__copy__ = lambda self: Row(self)` alias here was
    # dead code, unconditionally shadowed by the `__copy__` method below;
    # it has been removed.

    __call__ = __getitem__

    def get(self, key, default=None):
        # dict-like get on the underlying attribute dictionary
        return self.__dict__.get(key, default)

    def __contains__(self, key):
        return key in self.__dict__

    has_key = __contains__

    def __nonzero__(self):
        # a Row is falsy when it holds no values (Python 2 truth protocol)
        return len(self.__dict__) > 0

    def update(self, *args, **kwargs):
        self.__dict__.update(*args, **kwargs)

    def keys(self):
        return self.__dict__.keys()

    def items(self):
        return self.__dict__.items()

    def values(self):
        return self.__dict__.values()

    def __iter__(self):
        return self.__dict__.__iter__()

    def iteritems(self):
        return self.__dict__.iteritems()

    def __str__(self):
        ### this could be made smarter
        return '<Row %s>' % self.as_dict()

    def __repr__(self):
        return '<Row %s>' % self.as_dict()

    def __int__(self):
        # bypass the __getitem__/alias machinery on purpose
        return object.__getattribute__(self, 'id')

    def __eq__(self, other):
        # two Rows compare equal when their serializable contents match;
        # anything without an as_dict() compares unequal
        try:
            return self.as_dict() == other.as_dict()
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __copy__(self):
        return Row(dict(self))

    def as_dict(self, datetime_to_str=False, custom_types=None):
        """
        Return a plain dict copy of the Row: nested Rows are recursively
        converted, References become ints, Decimals become floats and
        datetimes are optionally stringified; values of any other
        non-serializable type (not listed in *custom_types*) are dropped.
        """
        SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
        if isinstance(custom_types, (list, tuple, set)):
            SERIALIZABLE_TYPES += custom_types
        elif custom_types:
            SERIALIZABLE_TYPES.append(custom_types)
        d = dict(self)
        # iterate over a copy of the keys since entries may be deleted
        for k in copy.copy(d.keys()):
            v = d[k]
            if d[k] is None:
                continue
            elif isinstance(v, Row):
                d[k] = v.as_dict()
            elif isinstance(v, Reference):
                d[k] = int(v)
            elif isinstance(v, decimal.Decimal):
                d[k] = float(v)
            elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
                if datetime_to_str:
                    d[k] = v.isoformat().replace('T', ' ')[:19]
            elif not isinstance(v, tuple(SERIALIZABLE_TYPES)):
                del d[k]
        return d

    def as_xml(self, row_name="row", colnames=None, indent='  '):
        """Serialize the Row (recursively) as an XML fragment; fields
        whose names are not alphanumeric are emitted as <extra> nodes."""
        def f(row, field, indent='  '):
            if isinstance(row, Row):
                spc = indent+'  \n'
                items = [f(row[x], x, indent+'  ') for x in row]
                return '%s<%s>\n%s\n%s</%s>' % (
                    indent,
                    field,
                    spc.join(item for item in items if item),
                    indent,
                    field)
            elif not callable(row):
                if REGEX_ALPHANUMERIC.match(field):
                    return '%s<%s>%s</%s>' % (indent, field, row, field)
                else:
                    return '%s<extra name="%s">%s</extra>' % \
                        (indent, field, row)
            else:
                return None
        return f(self, row_name, indent=indent)

    def as_json(self, mode="object", default=None, colnames=None,
                serialize=True, **kwargs):
        """
        serializes the table to a JSON list of objects
        kwargs are passed to .as_dict method
        only "object" mode supported for single row

        serialize = False used by Rows.as_json
        TODO: return array mode with query column order
        """

        def inner_loop(record, col):
            (t, f) = col.split('.')
            res = None
            if not REGEX_TABLE_DOT_FIELD.match(col):
                key = col
                res = record._extra[col]
            else:
                key = f
                if isinstance(record.get(t, None), Row):
                    res = record[t][f]
                else:
                    res = record[f]
            if mode == 'object':
                return (key, res)
            else:
                return res

        multi = any([isinstance(v, self.__class__) for v in self.values()])
        mode = mode.lower()
        if not mode in ['object', 'array']:
            raise SyntaxError('Invalid JSON serialization mode: %s' % mode)

        if mode == 'object' and colnames:
            item = dict([inner_loop(self, col) for col in colnames])
        elif colnames:
            item = [inner_loop(self, col) for col in colnames]
        else:
            if not mode == 'object':
                raise SyntaxError('Invalid JSON serialization mode: %s' % mode)

            if multi:
                item = dict()
                [item.update(**v.as_dict(**kwargs)) for v in self.values()]
            else:
                item = self.as_dict(**kwargs)

        if serialize:
            if have_serializers:
                return serializers.json(item,
                                        default=default or
                                        serializers.custom_json)
            elif simplejson:
                return simplejson.dumps(item)
            else:
                raise RuntimeError("missing simplejson")
        else:
            return item

################################################################################
# Everything below should be independent of the specifics of the database
# and should work for RDBMs and some NoSQL databases
################################################################################

class SQLCallableList(list):
    # A list that can also be called: db.tables() returns a fresh copy
    # of the table-name list, while db.tables exposes the live one.
    def __call__(self):
        return copy.copy(self)
6933
def smart_query(fields, text):
    """
    Parse a loosely natural-language query string (as produced by search
    widgets, e.g. "name contains 'foo' and age > 3") into a DAL Query
    over *fields* (a Field, a Table, or a list of either).

    Raises RuntimeError on invalid field lists, unknown field names,
    or unsupported operations.
    """
    if not isinstance(fields, (list, tuple)):
        fields = [fields]
    # flatten tables into their fields
    new_fields = []
    for field in fields:
        if isinstance(field, Field):
            new_fields.append(field)
        elif isinstance(field, Table):
            for ofield in field:
                new_fields.append(ofield)
        else:
            raise RuntimeError("fields must be a list of fields")
    fields = new_fields
    # index fields both by bare name ("name") and dotted name ("table.name")
    field_map = {}
    for field in fields:
        n = field.name.lower()
        if not n in field_map:
            field_map[n] = field
        n = str(field).lower()
        if not n in field_map:
            field_map[n] = field
    # pull quoted string constants out of the text and replace them with
    # '#<i>' placeholders so the tokenizer below cannot split them
    constants = {}
    i = 0
    while True:
        m = REGEX_CONST_STRING.search(text)
        if not m: break
        text = text[:m.start()]+('#%i' % i)+text[m.end():]
        constants[str(i)] = m.group()[1:-1]
        i += 1
    text = re.sub('\s+', ' ', text).lower()
    # normalize symbolic and verbose operators to a canonical token;
    # order matters (longest verbose forms first)
    for a, b in [('&', 'and'),
                 ('|', 'or'),
                 ('~', 'not'),
                 ('==', '='),
                 ('<', '<'),
                 ('>', '>'),
                 ('<=', '<='),
                 ('>=', '>='),
                 ('<>', '!='),
                 ('=<', '<='),
                 ('=>', '>='),
                 ('=', '='),
                 (' less or equal than ', '<='),
                 (' greater or equal than ', '>='),
                 (' equal or less than ', '<='),
                 (' equal or greater than ', '>='),
                 (' less or equal ', '<='),
                 (' greater or equal ', '>='),
                 (' equal or less ', '<='),
                 (' equal or greater ', '>='),
                 (' not equal to ', '!='),
                 (' not equal ', '!='),
                 (' equal to ', '='),
                 (' equal ', '='),
                 (' equals ', '='),
                 (' less than ', '<'),
                 (' greater than ', '>'),
                 (' starts with ', 'startswith'),
                 (' ends with ', 'endswith'),
                 (' not in ', 'notbelongs'),
                 (' in ', 'belongs'),
                 (' is ', '=')]:
        if a[0] == ' ':
            # also match "is <verbose op>", e.g. "is less than"
            text = text.replace(' is'+a, ' %s ' % b)
        text = text.replace(a, ' %s ' % b)
    text = re.sub('\s+', ' ', text).lower()
    # glue back split comparison operators, e.g. "< =" -> "<="
    text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])', '\g<a>\g<b>', text)
    # simple state machine over tokens: field -> op -> value
    query = field = neg = op = logic = None
    for item in text.split():
        if field is None:
            if item == 'not':
                neg = True
            elif not neg and not logic and item in ('and', 'or'):
                logic = item
            elif item in field_map:
                field = field_map[item]
            else:
                raise RuntimeError("Invalid syntax")
        elif not field is None and op is None:
            op = item
        elif not op is None:
            if item.startswith('#'):
                # restore a previously extracted quoted constant
                if not item[1:] in constants:
                    raise RuntimeError("Invalid syntax")
                value = constants[item[1:]]
            else:
                value = item
                if field.type in ('text', 'string', 'json'):
                    # bare-word equality on text fields becomes LIKE
                    if op == '=': op = 'like'
            if op == '=': new_query = field == value
            elif op == '<': new_query = field < value
            elif op == '>': new_query = field > value
            elif op == '<=': new_query = field <= value
            elif op == '>=': new_query = field >= value
            elif op == '!=': new_query = field != value
            elif op == 'belongs': new_query = field.belongs(value.split(','))
            elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
            elif field.type in ('text', 'string', 'json'):
                if op == 'contains': new_query = field.contains(value)
                elif op == 'like': new_query = field.like(value)
                elif op == 'startswith': new_query = field.startswith(value)
                elif op == 'endswith': new_query = field.endswith(value)
                else: raise RuntimeError("Invalid operation")
            elif field._db._adapter.dbengine == 'google:datastore' and \
                 field.type in ('list:integer', 'list:string', 'list:reference'):
                if op == 'contains': new_query = field.contains(value)
                else: raise RuntimeError("Invalid operation")
            else: raise RuntimeError("Invalid operation")
            if neg: new_query = ~new_query
            # combine with the running query using the pending connective
            if query is None:
                query = new_query
            elif logic == 'and':
                query &= new_query
            elif logic == 'or':
                query |= new_query
            field = op = neg = logic = None
    return query
7051
class DAL(object):

    """
    An instance of this class represents a database connection.

    Example::

        db = DAL('sqlite://test.db')

    or (experimental dict form)::

        db = DAL({"uri": ..., "items": ...})

        db.define_table('tablename', Field('fieldname1'),
                                     Field('fieldname2'))
    """
    def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
        # DAL instances are tracked per thread, grouped by db_uid (an md5
        # of the repr of the uri unless passed explicitly), so repeated
        # DAL(uri) calls within a thread are grouped together; a
        # '<zombie>' uri requests a placeholder instance that will be
        # (re)connected later.
        if not hasattr(THREAD_LOCAL, 'db_instances'):
            THREAD_LOCAL.db_instances = {}
        if not hasattr(THREAD_LOCAL, 'db_instances_zombie'):
            THREAD_LOCAL.db_instances_zombie = {}
        if uri == '<zombie>':
            db_uid = kwargs['db_uid']  # a zombie must have a db_uid!
            if db_uid in THREAD_LOCAL.db_instances:
                # reuse the most recent live instance for this uid
                db_group = THREAD_LOCAL.db_instances[db_uid]
                db = db_group[-1]
            elif db_uid in THREAD_LOCAL.db_instances_zombie:
                db = THREAD_LOCAL.db_instances_zombie[db_uid]
            else:
                db = super(DAL, cls).__new__(cls)
                THREAD_LOCAL.db_instances_zombie[db_uid] = db
        else:
            db_uid = kwargs.get('db_uid', hashlib_md5(repr(uri)).hexdigest())
            if db_uid in THREAD_LOCAL.db_instances_zombie:
                # promote the zombie previously registered for this uid
                db = THREAD_LOCAL.db_instances_zombie[db_uid]
                del THREAD_LOCAL.db_instances_zombie[db_uid]
            else:
                db = super(DAL, cls).__new__(cls)
            db_group = THREAD_LOCAL.db_instances.get(db_uid, [])
            db_group.append(db)
            THREAD_LOCAL.db_instances[db_uid] = db_group
        db._db_uid = db_uid
        return db
    @staticmethod
    def set_folder(folder):
        """
        Set the folder where .table migration files are stored, for all
        adapters in this thread.

        # ## this allows gluon to set a folder for this thread
        # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
        """
        BaseAdapter.set_folder(folder)
    @staticmethod
    def get_instances():
        """
        Returns a dictionary with uri as key, with timings and defined
        tables, e.g.::

            {'sqlite://storage.sqlite': {
                'dbstats': [(select auth_user.email from auth_user, 0.02009)],
                'dbtables': {
                    'defined': ['auth_cas', 'auth_event', 'auth_group',
                                'auth_membership', 'auth_permission',
                                'auth_user'],
                    'lazy': '[]'
                    }
                }
            }
        """
        dbs = getattr(THREAD_LOCAL, 'db_instances', {}).items()
        infos = {}
        for db_uid, db_group in dbs:
            for db in db_group:
                # skip zombie/dummy instances that never connected
                if not db._uri:
                    continue
                k = hide_password(db._uri)
                infos[k] = dict(dbstats=[(row[0], row[1]) for row in db._timings],
                                dbtables={'defined':
                                          sorted(list(set(db.tables) -
                                                      set(db._LAZY_TABLES.keys()))),
                                          'lazy': sorted(db._LAZY_TABLES.keys())}
                                )
        return infos
7133 7134 @staticmethod
7135 - def distributed_transaction_begin(*instances):
7136 if not instances: 7137 return 7138 thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread()) 7139 keys = ['%s.%i' % (thread_key, i) for (i,db) in instances] 7140 instances = enumerate(instances) 7141 for (i, db) in instances: 7142 if not db._adapter.support_distributed_transaction(): 7143 raise SyntaxError( 7144 'distributed transaction not suported by %s' % db._dbname) 7145 for (i, db) in instances: 7146 db._adapter.distributed_transaction_begin(keys[i])
7147 7148 @staticmethod
7149 - def distributed_transaction_commit(*instances):
7150 if not instances: 7151 return 7152 instances = enumerate(instances) 7153 thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread()) 7154 keys = ['%s.%i' % (thread_key, i) for (i,db) in instances] 7155 for (i, db) in instances: 7156 if not db._adapter.support_distributed_transaction(): 7157 raise SyntaxError( 7158 'distributed transaction not suported by %s' % db._dbanme) 7159 try: 7160 for (i, db) in instances: 7161 db._adapter.prepare(keys[i]) 7162 except: 7163 for (i, db) in instances: 7164 db._adapter.rollback_prepared(keys[i]) 7165 raise RuntimeError('failure to commit distributed transaction') 7166 else: 7167 for (i, db) in instances: 7168 db._adapter.commit_prepared(keys[i]) 7169 return
7170
    def __init__(self, uri=DEFAULT_URI,
                 pool_size=0, folder=None,
                 db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 migrate_enabled=True, fake_migrate_all=False,
                 decode_credentials=False, driver_args=None,
                 adapter_args=None, attempts=5, auto_import=False,
                 bigint_id=False, debug=False, lazy_tables=False,
                 db_uid=None, do_connect=True, after_connection=None):
        """
        Creates a new Database Abstraction Layer instance.

        Keyword arguments:

        :uri: string with the connection information
            (default: 'sqlite://dummy.db'); a list/tuple of uris is
            tried in order.  Experimental: a dict may be passed instead,
            e.g. ``DAL({"uri": "sqlite://storage.sqlite",
            "items": {...}})`` (see ``db.as_dict()`` for the layout;
            with Python < 2.6.5, dict keys must be cast to str)
        :pool_size: how many open connections to pool for this database
        :folder: where .table migration files are created
            (set automatically within web2py; use an explicit path when
            using DAL outside web2py)
        :db_codec: string encoding of the database (default: 'UTF-8')
        :check_reserved: list of adapters whose reserved SQL/NOSQL
            keyword lists table/column names are validated against:
            'common' (recommended), 'all', '<adaptername>' or
            '<adaptername>_nonreserved'
        :migrate: default migrate behavior for all tables (default True)
        :fake_migrate: default fake_migrate behavior for all tables
        :migrate_enabled: if False, disables ALL migrations
        :fake_migrate_all: if True, fake-migrates ALL tables
        :attempts: number of connection attempts per uri (default 5)
        :auto_import: if True, import table definitions automatically
            from the .table files in *folder*
        :bigint_id: if True, use bigint instead of int for id and
            reference fields
        :lazy_tables: delay table definition until first table access
        :after_connection: callable executed after each connection
        """

        items = None
        if isinstance(uri, dict):
            # experimental dict input: split out table defs and the uri
            if "items" in uri:
                items = uri.pop("items")
            try:
                newuri = uri.pop("uri")
            except KeyError:
                newuri = DEFAULT_URI
            # NOTE(review): updating locals() has no effect on local
            # variables in CPython, so any remaining keys of a dict uri
            # are silently ignored here — confirm intent.
            locals().update(uri)
            uri = newuri

        # a zombie with a known uid was already initialized by __new__
        if uri == '<zombie>' and db_uid is not None: return
        if not decode_credentials:
            credential_decoder = lambda cred: cred
        else:
            credential_decoder = lambda cred: urllib.unquote(cred)
        self._folder = folder
        if folder:
            self.set_folder(folder)
        self._uri = uri
        self._pool_size = pool_size
        self._db_codec = db_codec
        self._lastsql = ''
        self._timings = []
        self._pending_references = {}
        self._request_tenant = 'request_tenant'
        self._common_fields = []
        self._referee_name = '%(table)s'
        self._bigint_id = bigint_id
        self._debug = debug
        self._migrated = []
        self._LAZY_TABLES = {}
        self._lazy_tables = lazy_tables
        self._tables = SQLCallableList()
        self._driver_args = driver_args
        self._adapter_args = adapter_args
        self._check_reserved = check_reserved
        self._decode_credentials = decode_credentials
        self._attempts = attempts
        self._do_connect = do_connect

        if not str(attempts).isdigit() or attempts < 0:
            attempts = 5
        if uri:
            uris = isinstance(uri, (list, tuple)) and uri or [uri]
            error = ''
            connected = False
            # try each uri in turn, up to `attempts` rounds, sleeping
            # one second between failed rounds
            for k in range(attempts):
                for uri in uris:
                    try:
                        if is_jdbc and not uri.startswith('jdbc:'):
                            uri = 'jdbc:'+uri
                        self._dbname = REGEX_DBNAME.match(uri).group()
                        if not self._dbname in ADAPTERS:
                            raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
                        # notice that driver args or {} else driver_args
                        # defaults to {} global, not correct
                        kwargs = dict(db=self, uri=uri,
                                      pool_size=pool_size,
                                      folder=folder,
                                      db_codec=db_codec,
                                      credential_decoder=credential_decoder,
                                      driver_args=driver_args or {},
                                      adapter_args=adapter_args or {},
                                      do_connect=do_connect,
                                      after_connection=after_connection)
                        self._adapter = ADAPTERS[self._dbname](**kwargs)
                        types = ADAPTERS[self._dbname].types
                        # copy so multiple DAL() possible
                        self._adapter.types = copy.copy(types)
                        if bigint_id:
                            if 'big-id' in types and 'reference' in types:
                                self._adapter.types['id'] = types['big-id']
                                self._adapter.types['reference'] = types['big-reference']
                        connected = True
                        break
                    except SyntaxError:
                        # unsupported uri: no point retrying
                        raise
                    except Exception:
                        tb = traceback.format_exc()
                        sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
                if connected:
                    break
                else:
                    time.sleep(1)
            if not connected:
                raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
        else:
            # no uri: dummy adapter, migrations make no sense
            self._adapter = BaseAdapter(db=self, pool_size=0,
                                        uri='None', folder=folder,
                                        db_codec=db_codec, after_connection=after_connection)
            migrate = fake_migrate = False
        adapter = self._adapter
        self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
        self.check_reserved = check_reserved
        if self.check_reserved:
            from reserved_sql_keywords import ADAPTERS as RSK
            self.RSK = RSK
        self._migrate = migrate
        self._fake_migrate = fake_migrate
        self._migrate_enabled = migrate_enabled
        self._fake_migrate_all = fake_migrate_all
        if auto_import or items:
            self.import_table_definitions(adapter.folder,
                                          items=items)
7341 7342 @property
def tables(self):
    """Read-only list of table names defined on this DAL (exposed as a property)."""
    defined = self._tables
    return defined
7345
def import_table_definitions(self, path, migrate=False,
                             fake_migrate=False, items=None):
    """
    Rebuild table definitions either from a serialized ``items`` dict
    (as produced by DAL.as_dict) or from the pickled ``*.table``
    migration files found under ``path``.
    """
    pattern = pjoin(path,self._uri_hash+'_*.table')
    if items:
        for tablename, table in items.iteritems():
            # TODO: read all field/table options
            fields = []
            # remove unsupported/illegal Table arguments
            [table.pop(name) for name in ("name", "fields") if
             name in table]
            if "items" in table:
                for fieldname, field in table.pop("items").iteritems():
                    # remove unsupported/illegal Field arguments
                    [field.pop(key) for key in ("requires", "name",
                     "compute", "colname") if key in field]
                    fields.append(Field(str(fieldname), **field))
            self.define_table(str(tablename), *fields, **table)
    else:
        for filename in glob.glob(pattern):
            tfile = self._adapter.file_open(filename, 'r')
            try:
                sql_fields = pickle.load(tfile)
                # NOTE(review): slice assumes the filename layout matches
                # '<uri_hash>_<name>.table' — confirm against save side
                name = filename[len(pattern)-7:-6]
                mf = [(value['sortable'],
                       Field(key,
                             type=value['type'],
                             length=value.get('length',None),
                             notnull=value.get('notnull',False),
                             unique=value.get('unique',False))) \
                      for key, value in sql_fields.iteritems()]
                # restore the original field definition order (py2 cmp sort)
                mf.sort(lambda a,b: cmp(a[0],b[0]))
                self.define_table(name,*[item[1] for item in mf],
                                  **dict(migrate=migrate,
                                         fake_migrate=fake_migrate))
            finally:
                self._adapter.file_close(tfile)
7382
def check_reserved_keyword(self, name):
    """
    Validate ``name`` against reserved SQL/NOSQL keywords.

    ``self.check_reserved`` is a list of backend keys into ``self.RSK``
    (e.g. ``['common', 'postgres', 'mysql']`` or ``['all']``).
    Raises SyntaxError when ``name`` is reserved for any listed backend.
    """
    upper_name = name.upper()
    for backend in self.check_reserved:
        if upper_name in self.RSK[backend]:
            raise SyntaxError(
                'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
7397
def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
    """
    Match request ``args``/``vars`` against RESTful URL ``patterns`` and
    return a Row with status/pattern/error/response.

    EXAMPLE:

    db.define_table('person',Field('name'),Field('info'))
    db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))

    @request.restful()
    def index():
        def GET(*args,**vars):
            patterns = [
                "/friends[person]",
                "/{person.name}/:field",
                "/{person.name}/pets[pet.ownedby]",
                "/{person.name}/pets[pet.ownedby]/{pet.name}",
                "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
                ("/dogs[pet]", db.pet.info=='dog'),
                ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
                ]
            parser = db.parse_as_rest(patterns,args,vars)
            if parser.status == 200:
                return dict(content=parser.response)
            else:
                raise HTTP(parser.status,parser.error)

        def POST(table_name,**vars):
            if table_name == 'person':
                return db.person.validate_and_insert(**vars)
            elif table_name == 'pet':
                return db.pet.validate_and_insert(**vars)
            else:
                raise HTTP(400)
        return locals()
    """

    db = self
    re1 = REGEX_SEARCH_PATTERN    # matches '{table.field[.op[.not]]}' tags
    re2 = REGEX_SQUARE_BRACKETS   # matches 'name[table.field]' tags

    def auto_table(table,base='',depth=0):
        # generate a default pattern list for every readable field of `table`,
        # recursing `depth` levels into referencing tables
        patterns = []
        for field in db[table].fields:
            if base:
                tag = '%s/%s' % (base,field.replace('_','-'))
            else:
                tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
            f = db[table][field]
            if not f.readable: continue
            if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
                tag += '/{%s.%s}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
            elif f.type.startswith('boolean'):
                tag += '/{%s.%s}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
            elif f.type in ('float','double','integer','bigint'):
                tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
            elif f.type.startswith('list:'):
                tag += '/{%s.%s.contains}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
            elif f.type in ('date','datetime'):
                tag+= '/{%s.%s.year}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
                tag+='/{%s.%s.month}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
                tag+='/{%s.%s.day}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
            if f.type in ('datetime','time'):
                tag+= '/{%s.%s.hour}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
                tag+='/{%s.%s.minute}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
                tag+='/{%s.%s.second}' % (table,field)
                patterns.append(tag)
                patterns.append(tag+'/:field')
            if depth>0:
                for f in db[table]._referenced_by:
                    tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
                    patterns.append(tag)
                    patterns += auto_table(table,base=tag,depth=depth-1)
        return patterns

    if patterns == 'auto':
        # build patterns for every non-auth table
        patterns=[]
        for table in db.tables:
            if not table.startswith('auth_'):
                patterns.append('/%s[%s]' % (table,table))
                patterns += auto_table(table,base='',depth=1)
    else:
        # expand any ':auto[table]' entries in place
        i = 0
        while i<len(patterns):
            pattern = patterns[i]
            if not isinstance(pattern,str):
                pattern = pattern[0]
            tokens = pattern.split('/')
            if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
                new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
                                          '/'.join(tokens[:-1]))
                patterns = patterns[:i]+new_patterns+patterns[i+1:]
                i += len(new_patterns)
            else:
                i += 1
    if '/'.join(args) == 'patterns':
        # introspection endpoint: list the active patterns
        return Row({'status':200,'pattern':'list',
                    'error':None,'response':patterns})
    for pattern in patterns:
        basequery, exposedfields = None, []
        if isinstance(pattern,tuple):
            if len(pattern)==2:
                pattern, basequery = pattern
            elif len(pattern)>2:
                pattern, basequery, exposedfields = pattern[0:3]
        otable=table=None
        if not isinstance(queries,dict):
            dbset=db(queries)
            if basequery is not None:
                dbset = dbset(basequery)
        i=0
        tags = pattern[1:].split('/')
        if len(tags)!=len(args):
            continue
        for tag in tags:
            if re1.match(tag):
                # '{table.field.op}' tag: build a filter from args[i]
                tokens = tag[1:-1].split('.')
                table, field = tokens[0], tokens[1]
                if not otable or table == otable:
                    if len(tokens)==2 or tokens[2]=='eq':
                        query = db[table][field]==args[i]
                    elif tokens[2]=='ne':
                        query = db[table][field]!=args[i]
                    elif tokens[2]=='lt':
                        query = db[table][field]<args[i]
                    elif tokens[2]=='gt':
                        query = db[table][field]>args[i]
                    elif tokens[2]=='ge':
                        query = db[table][field]>=args[i]
                    elif tokens[2]=='le':
                        query = db[table][field]<=args[i]
                    elif tokens[2]=='year':
                        query = db[table][field].year()==args[i]
                    elif tokens[2]=='month':
                        query = db[table][field].month()==args[i]
                    elif tokens[2]=='day':
                        query = db[table][field].day()==args[i]
                    elif tokens[2]=='hour':
                        query = db[table][field].hour()==args[i]
                    elif tokens[2]=='minute':
                        query = db[table][field].minutes()==args[i]
                    elif tokens[2]=='second':
                        query = db[table][field].seconds()==args[i]
                    elif tokens[2]=='startswith':
                        query = db[table][field].startswith(args[i])
                    elif tokens[2]=='contains':
                        query = db[table][field].contains(args[i])
                    else:
                        raise RuntimeError("invalid pattern: %s" % pattern)
                    if len(tokens)==4 and tokens[3]=='not':
                        query = ~query
                    elif len(tokens)>=4:
                        raise RuntimeError("invalid pattern: %s" % pattern)
                    if not otable and isinstance(queries,dict):
                        dbset = db(queries[table])
                        if basequery is not None:
                            dbset = dbset(basequery)
                    dbset=dbset(query)
                else:
                    raise RuntimeError("missing relation in pattern: %s" % pattern)
            elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
                # 'name[table.field]' tag: hop to a referencing/referenced table
                ref = tag[tag.find('[')+1:-1]
                if '.' in ref and otable:
                    table,field = ref.split('.')
                    selfld = '_id'
                    if db[table][field].type.startswith('reference '):
                        refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
                    else:
                        refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
                    if refs:
                        selfld = refs[0]
                    if nested_select:
                        try:
                            dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
                        except ValueError:
                            return Row({'status':400,'pattern':pattern,
                                        'error':'invalid path','response':None})
                    else:
                        items = [item.id for item in dbset.select(db[otable][selfld])]
                        dbset=db(db[table][field].belongs(items))
                else:
                    table = ref
                    if not otable and isinstance(queries,dict):
                        dbset = db(queries[table])
                    dbset=dbset(db[table])
            elif tag==':field' and table:
                # trailing ':field': return the values of one requested field
                field = args[i]
                if not field in db[table]: break
                # hand-built patterns should respect .readable=False as well
                if not db[table][field].readable:
                    return Row({'status':418,'pattern':pattern,
                                'error':'I\'m a teapot','response':None})
                try:
                    distinct = vars.get('distinct', False) == 'True'
                    offset = int(vars.get('offset',None) or 0)
                    limits = (offset,int(vars.get('limit',None) or 1000)+offset)
                except ValueError:
                    return Row({'status':400,'error':'invalid limits','response':None})
                items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
                if items:
                    return Row({'status':200,'response':items,
                                'pattern':pattern})
                else:
                    return Row({'status':404,'pattern':pattern,
                                'error':'no record found','response':None})
            elif tag != args[i]:
                # literal path segment mismatch: try next pattern
                break
            otable = table
            i += 1
            if i==len(tags) and table:
                # whole pattern matched: run the final select
                ofields = vars.get('order',db[table]._id.name).split('|')
                try:
                    orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
                except (KeyError, AttributeError):
                    return Row({'status':400,'error':'invalid orderby','response':None})
                if exposedfields:
                    fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
                else:
                    fields = [field for field in db[table] if field.readable]
                count = dbset.count()
                try:
                    offset = int(vars.get('offset',None) or 0)
                    limits = (offset,int(vars.get('limit',None) or 1000)+offset)
                except ValueError:
                    return Row({'status':400,'error':'invalid limits','response':None})
                if count > limits[1]-limits[0]:
                    return Row({'status':400,'error':'too many records','response':None})
                try:
                    response = dbset.select(limitby=limits,orderby=orderby,*fields)
                except ValueError:
                    return Row({'status':400,'pattern':pattern,
                                'error':'invalid path','response':None})
                return Row({'status':200,'response':response,
                            'pattern':pattern,'count':count})
    return Row({'status':400,'error':'no matching pattern','response':None})
7651
def define_table(
    self,
    tablename,
    *fields,
    **args
    ):
    """
    Define (or lazily register) a table on this DAL and return it.

    Raises SyntaxError for missing/invalid/duplicate names (unless
    redefine=True) or unknown keyword arguments.  Returns None when
    lazy_tables is enabled and the definition is deferred.
    """
    if not isinstance(tablename,str):
        raise SyntaxError("missing table name")
    elif hasattr(self,tablename) or tablename in self.tables:
        if not args.get('redefine',False):
            raise SyntaxError('table already defined: %s' % tablename)
    elif tablename.startswith('_') or hasattr(self,tablename) or \
            REGEX_PYTHON_KEYWORDS.match(tablename):
        raise SyntaxError('invalid table name: %s' % tablename)
    elif self.check_reserved:
        self.check_reserved_keyword(tablename)
    else:
        # NOTE(review): because of the elif chain, this TABLE_ARGS validation
        # runs only when check_reserved is falsy — confirm that is intended
        invalid_args = set(args)-TABLE_ARGS
        if invalid_args:
            raise SyntaxError('invalid table "%s" attributes: %s' \
                % (tablename,invalid_args))
    if self._lazy_tables and not tablename in self._LAZY_TABLES:
        # defer the actual Table construction until first attribute access
        self._LAZY_TABLES[tablename] = (tablename,fields,args)
        table = None
    else:
        table = self.lazy_define_table(tablename,*fields,**args)
    if not tablename in self.tables:
        self.tables.append(tablename)
    return table
7681
def lazy_define_table(
    self,
    tablename,
    *fields,
    **args
    ):
    """
    Actually construct the Table object (and migrate the backend schema
    when enabled).  Called by define_table(), or on first access when
    lazy_tables is active.
    """
    args_get = args.get
    common_fields = self._common_fields
    if common_fields:
        fields = list(fields) + list(common_fields)

    table_class = args_get('table_class',Table)
    table = table_class(self, tablename, *fields, **args)
    table._actual = True
    self[tablename] = table
    # must follow above line to handle self references
    table._create_references()
    for field in table:
        if field.requires == DEFAULT:
            field.requires = sqlhtml_validators(field)

    migrate = self._migrate_enabled and args_get('migrate',self._migrate)
    # NOTE(review): precedence is (migrate and uri-check) OR datastore —
    # google:datastore always takes the create_table path; confirm intended
    if migrate and not self._uri in (None,'None') \
            or self._adapter.dbengine=='google:datastore':
        fake_migrate = self._fake_migrate_all or \
            args_get('fake_migrate',self._fake_migrate)
        polymodel = args_get('polymodel',None)
        try:
            # serialize DDL across threads
            GLOBAL_LOCKER.acquire()
            self._lastsql = self._adapter.create_table(
                table,migrate=migrate,
                fake_migrate=fake_migrate,
                polymodel=polymodel)
        finally:
            GLOBAL_LOCKER.release()
    else:
        table._dbt = None
    on_define = args_get('on_define',None)
    if on_define: on_define(table)
    return table
7722
def as_dict(self, flat=False, sanitize=True, field_options=True):
    """
    Serialize this DAL's configuration and table definitions to a dict.
    With sanitize=True (default) the uri/dbname/db_uid are omitted.
    """
    dbname = db_uid = uri = None
    if not sanitize:
        uri, dbname, db_uid = (self._uri, self._dbname, self._db_uid)
    # NOTE(review): the unparenthesized tuple after 'for k in' is
    # Python-2-only comprehension syntax
    db_as_dict = dict(items={}, tables=[], uri=uri, dbname=dbname,
                      db_uid=db_uid,
                      **dict([(k, getattr(self, "_" + k)) for
                              k in 'pool_size','folder','db_codec',
                              'check_reserved','migrate','fake_migrate',
                              'migrate_enabled','fake_migrate_all',
                              'decode_credentials','driver_args',
                              'adapter_args', 'attempts',
                              'bigint_id','debug','lazy_tables',
                              'do_connect']))

    for table in self:
        tablename = str(table)
        db_as_dict["tables"].append(tablename)
        db_as_dict["items"][tablename] = table.as_dict(flat=flat,
                                        sanitize=sanitize,
                                        field_options=field_options)
    return db_as_dict
7745
def as_xml(self, sanitize=True, field_options=True):
    """Serialize this DAL definition to XML via the gluon serializers."""
    if not have_serializers:
        raise ImportError("No xml serializers available")
    flat_dict = self.as_dict(flat=True, sanitize=sanitize,
                             field_options=field_options)
    return serializers.xml(flat_dict)
7752
def as_json(self, sanitize=True, field_options=True):
    """Serialize this DAL definition to JSON via the gluon serializers."""
    if not have_serializers:
        raise ImportError("No json serializers available")
    flat_dict = self.as_dict(flat=True, sanitize=sanitize,
                             field_options=field_options)
    return serializers.json(flat_dict)
7759
def as_yaml(self, sanitize=True, field_options=True):
    """Serialize this DAL definition to YAML via the gluon serializers."""
    if not have_serializers:
        raise ImportError("No YAML serializers available")
    flat_dict = self.as_dict(flat=True, sanitize=sanitize,
                             field_options=field_options)
    return serializers.yaml(flat_dict)
7766
7767 - def __contains__(self, tablename):
7768 try: 7769 return tablename in self.tables 7770 except AttributeError: 7771 # The instance has no .tables attribute yet 7772 return False
7773 7774 has_key = __contains__ 7775
def get(self, key, default=None):
    """Dict-style access to instance attributes, with a default."""
    instance_dict = self.__dict__
    return instance_dict.get(key, default)
7778
7779 - def __iter__(self):
7780 for tablename in self.tables: 7781 yield self[tablename]
7782
7783 - def __getitem__(self, key):
7784 return self.__getattr__(str(key))
7785
def __getattr__(self, key):
    """Attribute access; materializes lazily-registered tables on first touch."""
    if ogetattr(self,'_lazy_tables') and \
            key in ogetattr(self,'_LAZY_TABLES'):
        # pop so the table is only built once
        tablename, fields, args = self._LAZY_TABLES.pop(key)
        return self.lazy_define_table(tablename,*fields,**args)
    return ogetattr(self, key)
7792
def __setitem__(self, key, value):
    """db['name'] = value — set as attribute (key coerced to str)."""
    # osetattr is presumably object.__setattr__, bypassing the
    # redefinition guard in __setattr__ — TODO confirm
    osetattr(self, str(key), value)
7795
def __setattr__(self, key, value):
    """Forbid rebinding an existing public (non-underscore) attribute."""
    if key[:1]!='_' and key in self:
        raise SyntaxError(
            'Object %s exists and cannot be redefined' % key)
    osetattr(self,key,value)
7801 7802 __delitem__ = object.__delattr__ 7803
def __repr__(self):
    """Show the connection uri (password masked), or db_uid for zombie instances."""
    if hasattr(self,'_uri'):
        return '<DAL uri="%s">' % hide_password(str(self._uri))
    else:
        # unpickled '<zombie>' instances only carry the uid
        return '<DAL db_uid="%s">' % self._db_uid
7809
def smart_query(self,fields,text):
    """Parse ``text`` with the module-level smart_query helper and wrap it in a Set."""
    return Set(self, smart_query(fields,text))
7812
def __call__(self, query=None, ignore_common_filters=None):
    """Return a Set for ``query``; Table → id-query, Field → not-None query."""
    if isinstance(query,Table):
        query = self._adapter.id_query(query)
    elif isinstance(query,Field):
        query = query!=None
    elif isinstance(query, dict):
        # dict form may carry its own ignore_common_filters flag
        icf = query.get("ignore_common_filters")
        if icf: ignore_common_filters = icf
    return Set(self, query, ignore_common_filters=ignore_common_filters)
7822
def commit(self):
    """Commit the current transaction on the underlying adapter."""
    self._adapter.commit()
7825
def rollback(self):
    """Roll back the current transaction on the underlying adapter."""
    self._adapter.rollback()
7828
def close(self):
    """Close the adapter connection and deregister from the per-thread instance pool."""
    self._adapter.close()
    if self._db_uid in THREAD_LOCAL.db_instances:
        db_group = THREAD_LOCAL.db_instances[self._db_uid]
        db_group.remove(self)
        if not db_group:
            del THREAD_LOCAL.db_instances[self._db_uid]
7836
def executesql(self, query, placeholders=None, as_dict=False,
               fields=None, colnames=None):
    """
    Execute an arbitrary SQL ``query``.

    :placeholders: optional sequence (or, if the driver supports it, dict)
        of values substituted into the raw SQL.
    :as_dict: when True, convert the driver cursor results into a list of
        dictionaries keyed by the column names taken from
        cursor.description (DB API 2.0) — same shape as Rows.to_list().
    :fields: list of DAL Field objects (Table objects allowed, their
        fields are extracted) matching the result columns; when given
        (or colnames is given) the results are parsed into a DAL Rows
        object via db._adapter.parse().
    :colnames: list of column labels in tablename.fieldname format;
        may be combined with ``fields`` (then Expressions are allowed in
        ``fields`` with arbitrary labels in ``colnames``).  Both must be
        in the same order as the cursor's result columns.

    Returns None when fetching results fails (e.g. a DDL statement).
    """
    adapter = self._adapter
    if placeholders:
        adapter.execute(query, placeholders)
    else:
        adapter.execute(query)
    if as_dict:
        if not hasattr(adapter.cursor,'description'):
            raise RuntimeError("database does not support executesql(...,as_dict=True)")
        # cursor.description is a sequence of 7-item sequences; the first
        # item of each is the column name (Python DB API 2.0)
        columns = adapter.cursor.description
        fields = [f[0] for f in columns]
        data = adapter._fetchall()
        # row['field_name'] is friendlier than row[0]
        return [dict(zip(fields,row)) for row in data]
    try:
        data = adapter._fetchall()
    except Exception:
        # BUGFIX: was a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit; statements with no result
        # set still return None as before
        return None
    if fields or colnames:
        fields = [] if fields is None else fields
        if not isinstance(fields, list):
            fields = [fields]
        extracted_fields = []
        for field in fields:
            if isinstance(field, Table):
                # a Table stands for all of its fields
                extracted_fields.extend([f for f in field])
            else:
                extracted_fields.append(field)
        if not colnames:
            colnames = ['%s.%s' % (f.tablename, f.name)
                        for f in extracted_fields]
        data = adapter.parse(
            data, fields=extracted_fields, colnames=colnames)
    return data
7925
7926 - def _remove_references_to(self, thistable):
7927 for table in self: 7928 table._referenced_by = [field for field in table._referenced_by 7929 if not field.table==thistable]
7930
def export_to_csv_file(self, ofile, *args, **kwargs):
    """
    Dump every table to ``ofile`` as 'TABLE <name>' sections of CSV,
    terminated by an 'END' marker (consumed by import_from_csv_file).

    Recognized kwargs:
    :max_fetch_rows: number of rows fetched per chunk (defaults to 500)
    :write_colnames: write the CSV header for each table (defaults to True)
    Remaining args/kwargs are forwarded to Rows.export_to_csv_file.
    """
    # BUGFIX: the chunk size used to be read with the misspelled key
    # 'max_fetch_rows,' (trailing comma inside the string), so callers
    # could never actually change it from the 500 default.
    step = int(kwargs.get('max_fetch_rows', 500))
    write_colnames = kwargs['write_colnames'] = \
        kwargs.get("write_colnames", True)
    for table in self.tables:
        ofile.write('TABLE %s\r\n' % table)
        query = self._adapter.id_query(self[table])
        nrows = self(query).count()
        kwargs['write_colnames'] = write_colnames
        # fetch in chunks of ``step`` rows to bound memory use
        for k in range(0, nrows, step):
            self(query).select(limitby=(k, k + step)).export_to_csv_file(
                ofile, *args, **kwargs)
            # the header is written only for the first chunk of each table
            kwargs['write_colnames'] = False
        ofile.write('\r\n\r\n')
    ofile.write('END')
7946
def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
                         unique='uuid', *args, **kwargs):
    """
    Restore tables from a CSV stream produced by export_to_csv_file.

    Scans ``ifile`` for 'TABLE <name>' headers, delegates each section to
    that table's own import_from_csv_file, and stops at the 'END' marker.
    """
    id_offset = {}  # shared across tables; only used if id_map is None
    for raw_line in ifile:
        stripped = raw_line.strip()
        if not stripped:
            continue
        if stripped == 'END':
            return
        if not (stripped.startswith('TABLE ') and stripped[6:] in self.tables):
            raise SyntaxError('invalid file format')
        tablename = stripped[6:]
        self[tablename].import_from_csv_file(
            ifile, id_map, null, unique, id_offset, *args, **kwargs)
7963
def DAL_unpickler(db_uid):
    """Recreate a DAL on unpickle as a '<zombie>' re-attached by its db_uid."""
    return DAL('<zombie>',db_uid=db_uid)
7966
def DAL_pickler(db):
    """copyreg reducer: a DAL pickles down to just its db_uid."""
    return DAL_unpickler, (db._db_uid,)
7969 7970 copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)
class SQLALL(object):
    """
    Helper class providing a comma-separated string having all the field names
    (prefixed by table name and '.')

    normally only called from within gluon.sql
    """

    def __init__(self, table):
        self._table = table

    def __str__(self):
        # str(field) yields 'tablename.fieldname'
        return ', '.join(str(field) for field in self._table)
7985
# class Reference(int):
class Reference(long):
    """
    An integer subclass holding a record id that lazily behaves like the
    referenced record: attribute/item access fetches and caches the row
    on demand through self._table (set by the recursive-select machinery).
    """

    def __allocate(self):
        # fetch and cache the referenced row; complain if it vanished
        if not self._record:
            self._record = self._table[int(self)]
        if not self._record:
            raise RuntimeError(
                "Using a recursive select but encountered a broken reference: %s %d"%(self._table, int(self)))

    def __getattr__(self, key, default=None):
        # BUGFIX: now accepts ``default`` so that get() below no longer
        # raises TypeError; plain attribute misses still get default=None
        if key == 'id':
            return int(self)
        self.__allocate()
        return self._record.get(key, default)

    def get(self, key, default=None):
        """Dict-style access to the referenced record's columns."""
        return self.__getattr__(key, default)

    def __setattr__(self, key, value):
        if key.startswith('_'):
            # internal slots bypass the record entirely
            int.__setattr__(self, key, value)
            return
        self.__allocate()
        self._record[key] = value

    def __getitem__(self, key):
        if key == 'id':
            return int(self)
        self.__allocate()
        return self._record.get(key, None)

    def __setitem__(self,key,value):
        self.__allocate()
        self._record[key] = value
8021
def Reference_unpickler(data):
    """Rebuild a pickled Reference from its marshalled integer id."""
    return marshal.loads(data)
8025
def Reference_pickler(data):
    """copyreg reducer: a Reference pickles to (unpickler, (marshalled int,))."""
    try:
        marshal_dump = marshal.dumps(int(data))
    except AttributeError:
        # emulate marshal's int format when dumps is unavailable
        marshal_dump = 'i%s' % struct.pack('<i', int(data))
    return (Reference_unpickler, (marshal_dump,))
8032 8033 copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
8034 8035 8036 -class Table(object):
8037 8038 """ 8039 an instance of this class represents a database table 8040 8041 Example:: 8042 8043 db = DAL(...) 8044 db.define_table('users', Field('name')) 8045 db.users.insert(name='me') # print db.users._insert(...) to see SQL 8046 db.users.drop() 8047 """ 8048
def __init__(
    self,
    db,
    tablename,
    *fields,
    **args
    ):
    """
    Initializes the table and performs checking on the provided fields.

    Each table will have automatically an 'id'.

    If a field is of type Table, the fields (excluding 'id') from that table
    will be used instead.

    :raises SyntaxError: when a supplied field is of incorrect type.
    """
    self._actual = False  # set to True by define_table()
    self._tablename = tablename
    self._sequence_name = args.get('sequence_name',None) or \
        db and db._adapter.sequence_name(tablename)
    self._trigger_name = args.get('trigger_name',None) or \
        db and db._adapter.trigger_name(tablename)
    self._common_filter = args.get('common_filter', None)
    self._format = args.get('format',None)
    self._singular = args.get(
        'singular',tablename.replace('_',' ').capitalize())
    self._plural = args.get(
        'plural',pluralize(self._singular.lower()).capitalize())
    # horrible but for backard compatibility of appamdin:
    if 'primarykey' in args and args['primarykey']:
        self._primarykey = args.get('primarykey', None)

    # before/after insert/update/delete callback chains
    self._before_insert = []
    self._before_update = [Set.delete_uploaded_files]
    self._before_delete = [Set.delete_uploaded_files]
    self._after_insert = []
    self._after_update = []
    self._after_delete = []

    fieldnames,newfields=set(),[]
    if hasattr(self,'_primarykey'):
        if not isinstance(self._primarykey,list):
            raise SyntaxError(
                "primarykey must be a list of fields from table '%s'" \
                % tablename)
        if len(self._primarykey)==1:
            self._id = [f for f in fields if isinstance(f,Field) \
                            and f.name==self._primarykey[0]][0]
    elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
        # no explicit id field: add one automatically
        field = Field('id', 'id')
        newfields.append(field)
        fieldnames.add('id')
        self._id = field
    virtual_fields = []
    for field in fields:
        if isinstance(field, (FieldMethod, FieldVirtual)):
            # virtual/computed fields are attached last, after real fields
            virtual_fields.append(field)
        elif isinstance(field, Field) and not field.name in fieldnames:
            if field.db is not None:
                # field already belongs to another table: work on a copy
                field = copy.copy(field)
            newfields.append(field)
            fieldnames.add(field.name)
            if field.type=='id':
                self._id = field
        elif isinstance(field, Table):
            # inherit all non-id fields from another table
            table = field
            for field in table:
                if not field.name in fieldnames and not field.type=='id':
                    t2 = not table._actual and self._tablename
                    field = field.clone(point_self_references_to=t2)
                    newfields.append(field)
                    fieldnames.add(field.name)
        elif not isinstance(field, (Field, Table)):
            raise SyntaxError(
                'define_table argument is not a Field or Table: %s' % field)
    fields = newfields
    self._db = db
    tablename = tablename
    self._fields = SQLCallableList()
    self.virtualfields = []
    fields = list(fields)

    if db and db._adapter.uploads_in_blob==True:
        # backends storing uploads in blobs get a companion blob field
        # per upload field (named '<field>_blob' when uploadfield is True)
        uploadfields = [f.name for f in fields if f.type=='blob']
        for field in fields:
            fn = field.uploadfield
            if isinstance(field, Field) and field.type == 'upload'\
                    and fn is True:
                fn = field.uploadfield = '%s_blob' % field.name
            if isinstance(fn,str) and not fn in uploadfields:
                fields.append(Field(fn,'blob',default='',
                                    writable=False,readable=False))

    lower_fieldnames = set()
    reserved = dir(Table) + ['fields']
    for field in fields:
        field_name = field.name
        if db and db.check_reserved:
            db.check_reserved_keyword(field_name)
        elif field_name in reserved:
            raise SyntaxError("field name %s not allowed" % field_name)

        # field names must be unique case-insensitively
        if field_name.lower() in lower_fieldnames:
            raise SyntaxError("duplicate field %s in table %s" \
                % (field_name, tablename))
        else:
            lower_fieldnames.add(field_name.lower())

        self.fields.append(field_name)
        self[field_name] = field
        if field.type == 'id':
            self['id'] = field
        # bind the field to this table
        field.tablename = field._tablename = tablename
        field.table = field._table = self
        field.db = field._db = db
        if db and not field.type in ('text', 'blob', 'json') and \
                db._adapter.maxcharlength < field.length:
            # clamp to what the backend can store
            field.length = db._adapter.maxcharlength
    self.ALL = SQLALL(self)

    if hasattr(self,'_primarykey'):
        for k in self._primarykey:
            if k not in self.fields:
                raise SyntaxError(
                    "primarykey must be a list of fields from table '%s " % tablename)
            else:
                self[k].notnull = True
    for field in virtual_fields:
        self[field.name] = field
8179 8180 @property
def fields(self):
    """List of this table's field names (exposed as a property)."""
    defined = self._fields
    return defined
8183
def update(self, *args, **kwargs):
    """Tables are not updated in place; use db(query).update(...) instead."""
    raise RuntimeError("Syntax Not Supported")
8186
def _enable_record_versioning(self,
                              archive_db=None,
                              archive_name = '%(tablename)s_archive',
                              current_record = 'current_record',
                              is_active = 'is_active'):
    """
    Turn on record versioning: updates archive the old row into
    ``archive_name`` and deletes become soft (is_active=False).
    """
    archive_db = archive_db or self._db
    archive_name = archive_name % dict(tablename=self._tablename)
    if archive_name in archive_db.tables():
        return # do not try define the archive if already exists
    fieldnames = self.fields()
    # cross-database archives cannot hold a real reference, use bigint
    field_type = self if archive_db is self._db else 'bigint'
    archive_db.define_table(
        archive_name,
        Field(current_record,field_type),
        *[field.clone(unique=False) for field in self])
    self._before_update.append(
        lambda qset,fs,db=archive_db,an=archive_name,cn=current_record:
            archive_record(qset,fs,db[an],cn))
    if is_active and is_active in fieldnames:
        self._before_delete.append(
            lambda qset: qset.update(is_active=False))
        newquery = lambda query, t=self: t.is_active == True
        query = self._common_filter
        if query:
            # NOTE(review): this combines a Query with a lambda via '&' —
            # confirm Query.__and__ supports callable operands
            newquery = query & newquery
        self._common_filter = newquery
8213
def _validate(self,**vars):
    """Run each named field's validators over ``vars``; return a Row of errors (empty when all pass)."""
    errors = Row()
    for key,value in vars.iteritems():
        value,error = self[key].validate(value)
        if error:
            errors[key] = error
    return errors
8221
def _create_references(self):
    """Resolve this table's 'reference ...' fields and register back-references."""
    db = self._db
    pr = db._pending_references
    self._referenced_by = []
    for field in self:
        fieldname = field.name
        field_type = field.type
        if isinstance(field_type,str) and field_type[:10] == 'reference ':
            ref = field_type[10:].strip()
            if not ref.split():
                raise SyntaxError('Table: reference to nothing: %s' %ref)
            refs = ref.split('.')
            rtablename = refs[0]
            if not rtablename in db:
                # referenced table not defined yet: park this field until it is
                pr[rtablename] = pr.get(rtablename,[]) + [field]
                continue
            rtable = db[rtablename]
            if len(refs)==2:
                # 'tablename.fieldname' form: only valid between keyed tables
                rfieldname = refs[1]
                if not hasattr(rtable,'_primarykey'):
                    raise SyntaxError(
                        'keyed tables can only reference other keyed tables (for now)')
                if rfieldname not in rtable.fields:
                    raise SyntaxError(
                        "invalid field '%s' for referenced table '%s' in table '%s'" \
                        % (rfieldname, rtablename, self._tablename))
            rtable._referenced_by.append(field)
    # adopt any references parked here before this table existed
    for referee in pr.get(self._tablename,[]):
        self._referenced_by.append(referee)
8251
8252 - def _filter_fields(self, record, id=False):
8253 return dict([(k, v) for (k, v) in record.iteritems() if k 8254 in self.fields and (self[k].type!='id' or id)])
8255
8256 - def _build_query(self,key):
8257 """ for keyed table only """ 8258 query = None 8259 for k,v in key.iteritems(): 8260 if k in self._primarykey: 8261 if query: 8262 query = query & (self[k] == v) 8263 else: 8264 query = (self[k] == v) 8265 else: 8266 raise SyntaxError( 8267 'Field %s is not part of the primary key of %s' % \ 8268 (k,self._tablename)) 8269 return query
8270
    def __getitem__(self, key):
        """Polymorphic access: table[id] -> record, table[dict] -> keyed-table
        record, table[fieldname] -> Field object.  A falsy key returns None.
        """
        if not key:
            return None
        elif isinstance(key, dict):
            """ for keyed table """
            query = self._build_query(key)
            rows = self._db(query).select()
            if rows:
                return rows[0]
            return None
        elif str(key).isdigit() or 'google' in DRIVERS and isinstance(key, Key):
            # NOTE: parses as  isdigit() or ('google' in DRIVERS and
            # isinstance(key, Key))  because 'and' binds tighter than 'or'
            return self._db(self._id == key).select(limitby=(0,1)).first()
        elif key:
            # fall back to attribute lookup (field access by name)
            return ogetattr(self, str(key))
8285
    def __call__(self, key=DEFAULT, **kwargs):
        """Retrieve a single record.

        table(id), table(query), or table(field1=..., field2=...).
        The pseudo-kwargs '_for_update' and '_orderby' are popped and passed
        to select().  When key and kwargs are both given, the kwargs act as
        extra equality filters on the fetched record.  Returns a Row or None.
        """
        for_update = kwargs.get('_for_update',False)
        if '_for_update' in kwargs: del kwargs['_for_update']

        orderby = kwargs.get('_orderby',None)
        if '_orderby' in kwargs: del kwargs['_orderby']

        if not key is DEFAULT:
            if isinstance(key, Query):
                record = self._db(key).select(
                    limitby=(0,1),for_update=for_update, orderby=orderby).first()
            elif not str(key).isdigit():
                # non-numeric key cannot be an id
                record = None
            else:
                record = self._db(self._id == key).select(
                    limitby=(0,1),for_update=for_update, orderby=orderby).first()
            if record:
                # remaining kwargs must all match the fetched record
                for k,v in kwargs.iteritems():
                    if record[k]!=v: return None
            return record
        elif kwargs:
            # build an AND query from the keyword equality filters
            query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.iteritems()])
            return self._db(query).select(limitby=(0,1),for_update=for_update, orderby=orderby).first()
        else:
            return None
8311
    def __setitem__(self, key, value):
        """table[id] = dict updates/inserts a record; table[pk_dict] = dict
        upserts into a keyed table; otherwise sets an attribute.
        """
        if isinstance(key, dict) and isinstance(value, dict):
            """ option for keyed table """
            if set(key.keys()) == set(self._primarykey):
                value = self._filter_fields(value)
                kv = {}
                kv.update(value)
                kv.update(key)
                # try insert first; fall back to update of the keyed record
                if not self.insert(**kv):
                    query = self._build_query(key)
                    self._db(query).update(**self._filter_fields(value))
            else:
                raise SyntaxError(
                    'key must have all fields from primary key: %s'%\
                    (self._primarykey))
        elif str(key).isdigit():
            # NOTE(review): 'key == 0' is an int comparison, so a string key
            # '0' falls through to the update branch — confirm intent
            if key == 0:
                self.insert(**self._filter_fields(value))
            elif self._db(self._id == key)\
                    .update(**self._filter_fields(value)) is None:
                raise SyntaxError('No such record: %s' % key)
        else:
            if isinstance(key, dict):
                raise SyntaxError(
                    'value must be a dictionary: %s' % value)
            osetattr(self, str(key), value)

    __getattr__ = __getitem__
8341 - def __setattr__(self, key, value):
8342 if key[:1]!='_' and key in self: 8343 raise SyntaxError('Object exists and cannot be redefined: %s' % key) 8344 osetattr(self,key,value)
8345
8346 - def __delitem__(self, key):
8347 if isinstance(key, dict): 8348 query = self._build_query(key) 8349 if not self._db(query).delete(): 8350 raise SyntaxError('No such record: %s' % key) 8351 elif not str(key).isdigit() or \ 8352 not self._db(self._id == key).delete(): 8353 raise SyntaxError('No such record: %s' % key)
8354
8355 - def __contains__(self,key):
8356 return hasattr(self,key)
8357 8358 has_key = __contains__ 8359
    def items(self):
        """Return the table's attribute dictionary as (name, value) pairs."""
        return self.__dict__.items()
8362
8363 - def __iter__(self):
8364 for fieldname in self.fields: 8365 yield self[fieldname]
8366
    def iteritems(self):
        """Iterator over the table's attribute dictionary (name, value) pairs."""
        return self.__dict__.iteritems()
8369 8370
    def __repr__(self):
        """Debug representation: table name plus comma-joined field names."""
        return '<Table %s (%s)>' % (self._tablename,','.join(self.fields()))
8373
    def __str__(self):
        """SQL name of the table; aliased tables render as 'orig AS alias'
        (Oracle uses a bare space instead of the AS keyword).
        """
        if hasattr(self,'_ot') and self._ot is not None:
            if 'Oracle' in str(type(self._db._adapter)): # <<< patch
                return '%s %s' % (self._ot, self._tablename) # <<< patch
            return '%s AS %s' % (self._ot, self._tablename)
        return self._tablename
8380
8381 - def _drop(self, mode = ''):
8382 return self._db._adapter._drop(self, mode)
8383
8384 - def drop(self, mode = ''):
8385 return self._db._adapter.drop(self,mode)
8386
8387 - def _listify(self,fields,update=False):
8388 new_fields = {} # format: new_fields[name] = (field,value) 8389 8390 # store all fields passed as input in new_fields 8391 for name in fields: 8392 if not name in self.fields: 8393 if name != 'id': 8394 raise SyntaxError( 8395 'Field %s does not belong to the table' % name) 8396 else: 8397 field = self[name] 8398 value = fields[name] 8399 if field.filter_in: 8400 value = field.filter_in(value) 8401 new_fields[name] = (field,value) 8402 8403 # check all fields that should be in the table but are not passed 8404 to_compute = [] 8405 for ofield in self: 8406 name = ofield.name 8407 if not name in new_fields: 8408 # if field is supposed to be computed, compute it! 8409 if ofield.compute: # save those to compute for later 8410 to_compute.append((name,ofield)) 8411 # if field is required, check its default value 8412 elif not update and not ofield.default is None: 8413 value = ofield.default 8414 fields[name] = value 8415 new_fields[name] = (ofield,value) 8416 # if this is an update, user the update field instead 8417 elif update and not ofield.update is None: 8418 value = ofield.update 8419 fields[name] = value 8420 new_fields[name] = (ofield,value) 8421 # if the field is still not there but it should, error 8422 elif not update and ofield.required: 8423 raise RuntimeError( 8424 'Table: missing required field: %s' % name) 8425 # now deal with fields that are supposed to be computed 8426 if to_compute: 8427 row = Row(fields) 8428 for name,ofield in to_compute: 8429 # try compute it 8430 try: 8431 new_fields[name] = (ofield,ofield.compute(row)) 8432 except (KeyError, AttributeError): 8433 # error sinlently unless field is required! 8434 if ofield.required: 8435 raise SyntaxError('unable to comput field: %s' % name) 8436 return new_fields.values()
8437
    def _attempt_upload(self, fields):
        """Replace file-like values of 'upload' fields with stored filenames.

        Accepts cgi.FieldStorage-style objects (.file/.filename) or plain
        file objects (.read/.name).  String values are assumed to already be
        stored names and are left untouched.  Mutates `fields` in place.
        """
        for field in self:
            if field.type=='upload' and field.name in fields:
                value = fields[field.name]
                if value and not isinstance(value,str):
                    if hasattr(value,'file') and hasattr(value,'filename'):
                        new_name = field.store(value.file,filename=value.filename)
                    elif hasattr(value,'read') and hasattr(value,'name'):
                        new_name = field.store(value,filename=value.name)
                    else:
                        raise RuntimeError("Unable to handle upload")
                    fields[field.name] = new_name
8450
8451 - def _defaults(self, fields):
8452 "If there are no fields/values specified, return table defaults" 8453 if not fields: 8454 fields = {} 8455 for field in self: 8456 if field.type != "id": 8457 fields[field.name] = field.default 8458 return fields
8459
8460 - def _insert(self, **fields):
8461 fields = self._defaults(fields) 8462 return self._db._adapter._insert(self, self._listify(fields))
8463
    def insert(self, **fields):
        """Insert a record; returns the new id, or 0 when a _before_insert
        callback returned a truthy value (vetoing the insert).
        """
        fields = self._defaults(fields)
        self._attempt_upload(fields)
        # any truthy return from a _before_insert callback aborts the insert
        if any(f(fields) for f in self._before_insert): return 0
        ret = self._db._adapter.insert(self, self._listify(fields))
        if ret and self._after_insert:
            fields = Row(fields)
            [f(fields,ret) for f in self._after_insert]
        return ret
8473
    def validate_and_insert(self,**fields):
        """Validate every value, then insert only if all pass.

        Returns a Row with .id (the new id, or None on failure) and
        .errors (a Row mapping field name -> error message).
        """
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(fields)
        for key,value in fields.iteritems():
            # validators may transform the value; keep the transformed one
            value,error = self[key].validate(value)
            if error:
                response.errors[key] = "%s" % error
            else:
                new_fields[key] = value
        if not response.errors:
            response.id = self.insert(**new_fields)
        else:
            response.id = None
        return response
8489
8490 - def update_or_insert(self, _key=DEFAULT, **values):
8491 if _key is DEFAULT: 8492 record = self(**values) 8493 elif isinstance(_key,dict): 8494 record = self(**_key) 8495 else: 8496 record = self(_key) 8497 if record: 8498 record.update_record(**values) 8499 newid = None 8500 else: 8501 newid = self.insert(**values) 8502 return newid
8503
    def bulk_insert(self, items):
        """
        here items is a list of dictionaries

        Listifies each item, runs _before_insert callbacks (any truthy
        return vetoes the whole batch, returning 0), delegates to the
        adapter, then fires _after_insert per inserted item.
        """
        items = [self._listify(item) for item in items]
        if any(f(item) for item in items for f in self._before_insert):return 0
        ret = self._db._adapter.bulk_insert(self,items)
        # fire after-insert callbacks only when the adapter returned ids
        ret and [[f(item,ret[k]) for k,item in enumerate(items)] for f in self._after_insert]
        return ret
8513
8514 - def _truncate(self, mode = None):
8515 return self._db._adapter._truncate(self, mode)
8516
8517 - def truncate(self, mode = None):
8518 return self._db._adapter.truncate(self, mode)
8519
    def import_from_csv_file(
        self,
        csvfile,
        id_map=None,
        null='<NULL>',
        unique='uuid',
        id_offset=None, # id_offset used only when id_map is None
        *args, **kwargs
        ):
        """
        Import records from csv file.
        Column headers must have same names as table fields.
        Field 'id' is ignored.
        If column names read 'table.file' the 'table.' prefix is ignored.
        'unique' argument is a field which must be unique
        (typically a uuid field)
        'restore' argument is default False;
        if set True will remove old values in table first.
        'id_map' if set to None will not map ids.
        The import will keep the id numbers in the restored table.
        This assumes that there is a field of type id that
        is integer and in incrementing order.
        Will keep the id numbers in restored table.
        """

        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        restore = kwargs.get('restore', False)
        if restore:
            self._db[self].truncate()

        reader = csv.reader(csvfile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = None
        if isinstance(id_map, dict):
            if not self._tablename in id_map:
                id_map[self._tablename] = {}
            id_map_self = id_map[self._tablename]

        def fix(field, value, id_map, id_offset):
            # convert one CSV cell to a (fieldname, python value) pair,
            # remapping reference ids through id_map / id_offset
            list_reference_s='list:reference'
            if value == null:
                value = None
            elif field.type=='blob':
                value = base64.b64decode(value)
            elif field.type=='double' or field.type=='float':
                if not value.strip():
                    value = None
                else:
                    value = float(value)
            elif field.type in ('integer','bigint'):
                if not value.strip():
                    value = None
                else:
                    value = int(value)
            elif field.type.startswith('list:string'):
                value = bar_decode_string(value)
            elif field.type.startswith(list_reference_s):
                ref_table = field.type[len(list_reference_s):].strip()
                if id_map is not None:
                    value = [id_map[ref_table][int(v)] \
                             for v in bar_decode_string(value)]
                else:
                    value = [v for v in bar_decode_string(value)]
            elif field.type.startswith('list:'):
                value = bar_decode_integer(value)
            elif id_map and field.type.startswith('reference'):
                try:
                    value = id_map[field.type[9:].strip()][int(value)]
                except KeyError:
                    pass
            elif id_offset and field.type.startswith('reference'):
                try:
                    value = id_offset[field.type[9:].strip()]+int(value)
                except KeyError:
                    pass
            return (field.name, value)

        def is_id(colname):
            # True when the column maps to this table's 'id' field
            if colname in self:
                return self[colname].type == 'id'
            else:
                return False

        first = True
        unique_idx = None
        for line in reader:
            if not line:
                break
            if not colnames:
                # first row: header; strip any 'table.' prefix
                colnames = [x.split('.',1)[-1] for x in line][:len(line)]
                cols, cid = [], None
                for i,colname in enumerate(colnames):
                    if is_id(colname):
                        cid = i
                    else:
                        cols.append(i)
                    if colname == unique:
                        unique_idx = i
            else:
                items = [fix(self[colnames[i]], line[i], id_map, id_offset) \
                         for i in cols if colnames[i] in self.fields]

                if not id_map and cid is not None and id_offset is not None and not unique_idx:
                    csv_id = int(line[cid])
                    curr_id = self.insert(**dict(items))
                    if first:
                        first = False
                        # First curr_id is bigger than csv_id,
                        # then we are not restoring but
                        # extending db table with csv db table
                        if curr_id>csv_id:
                            id_offset[self._tablename] = curr_id-csv_id
                        else:
                            id_offset[self._tablename] = 0
                    # create new id until we get the same as old_id+offset
                    while curr_id<csv_id+id_offset[self._tablename]:
                        self._db(self._db[self][colnames[cid]] == curr_id).delete()
                        curr_id = self.insert(**dict(items))
                # Validation. Check for duplicate of 'unique' &,
                # if present, update instead of insert.
                elif not unique_idx:
                    new_id = self.insert(**dict(items))
                else:
                    unique_value = line[unique_idx]
                    query = self._db[self][unique] == unique_value
                    record = self._db(query).select().first()
                    if record:
                        record.update_record(**dict(items))
                        new_id = record[self._id.name]
                    else:
                        new_id = self.insert(**dict(items))
                if id_map and cid is not None:
                    id_map_self[int(line[cid])] = new_id
8656 - def as_dict(self, flat=False, sanitize=True, field_options=True):
8657 tablename = str(self) 8658 table_as_dict = dict(name=tablename, items={}, fields=[], 8659 sequence_name=self._sequence_name, 8660 trigger_name=self._trigger_name, 8661 common_filter=self._common_filter, format=self._format, 8662 singular=self._singular, plural=self._plural) 8663 8664 for field in self: 8665 if (field.readable or field.writable) or (not sanitize): 8666 table_as_dict["fields"].append(field.name) 8667 table_as_dict["items"][field.name] = \ 8668 field.as_dict(flat=flat, sanitize=sanitize, 8669 options=field_options) 8670 return table_as_dict
8671
8672 - def as_xml(self, sanitize=True, field_options=True):
8673 if not have_serializers: 8674 raise ImportError("No xml serializers available") 8675 d = self.as_dict(flat=True, sanitize=sanitize, 8676 field_options=field_options) 8677 return serializers.xml(d)
8678
8679 - def as_json(self, sanitize=True, field_options=True):
8680 if not have_serializers: 8681 raise ImportError("No json serializers available") 8682 d = self.as_dict(flat=True, sanitize=sanitize, 8683 field_options=field_options) 8684 return serializers.json(d)
8685
8686 - def as_yaml(self, sanitize=True, field_options=True):
8687 if not have_serializers: 8688 raise ImportError("No YAML serializers available") 8689 d = self.as_dict(flat=True, sanitize=sanitize, 8690 field_options=field_options) 8691 return serializers.yaml(d)
8692
8693 - def with_alias(self, alias):
8694 return self._db._adapter.alias(self,alias)
8695
8696 - def on(self, query):
8697 return Expression(self._db,self._db._adapter.ON,self,query)
8698
def archive_record(qset,fs,archive_table,current_record):
    """_before_update hook: copy each row about to change into archive_table.

    The archived copy stores the original row's id in `current_record`.
    Refuses to archive multi-table (join) updates.  Returns False so the
    update proceeds normally.
    """
    tablenames = qset.db._adapter.tables(qset.query)
    if len(tablenames)!=1: raise RuntimeError("cannot update join")
    # (removed an unused local that looked up qset.db[tablenames[0]])
    for row in qset.select():
        fields = archive_table._filter_fields(row)
        fields[current_record] = row.id
        archive_table.insert(**fields)
    return False
8708
class Expression(object):
    """Abstract SQL expression-tree node.

    An Expression wraps an adapter operator (self.op) applied to operands
    self.first / self.second; the adapter's expand() turns the tree into
    SQL (see __str__).  Comparison operators return Query objects instead.
    Field subclasses Expression, so all of these methods work on fields.
    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        type=None,
        **optional_args
        ):

        self.db = db
        self.op = op
        self.first = first
        self.second = second
        self._table = getattr(first,'_table',None)
        ### self._tablename = first._tablename ## CHECK
        if not type and first and hasattr(first,'type'):
            # inherit the SQL type of the first operand when none is given
            self.type = first.type
        else:
            self.type = type
        self.optional_args = optional_args

    # --- aggregates ---------------------------------------------------

    def sum(self):
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'SUM', self.type)

    def max(self):
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'MAX', self.type)

    def min(self):
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'MIN', self.type)

    def len(self):
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'LENGTH', 'integer')

    def avg(self):
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'AVG', self.type)

    def abs(self):
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'ABS', self.type)

    def lower(self):
        db = self.db
        return Expression(db, db._adapter.LOWER, self, None, self.type)

    def upper(self):
        db = self.db
        return Expression(db, db._adapter.UPPER, self, None, self.type)

    # --- date/time part extraction ------------------------------------

    def year(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'year', 'integer')

    def month(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'month', 'integer')

    def day(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'day', 'integer')

    def hour(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'hour', 'integer')

    def minutes(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'minute', 'integer')

    def coalesce(self,*others):
        db = self.db
        return Expression(db, db._adapter.COALESCE, self, others, self.type)

    def coalesce_zero(self):
        db = self.db
        return Expression(db, db._adapter.COALESCE_ZERO, self, None, self.type)

    def seconds(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'second', 'integer')

    def epoch(self):
        db = self.db
        return Expression(db, db._adapter.EPOCH, self, None, 'integer')

    def __getslice__(self, start, stop):
        """SQL SUBSTRING via slicing.  Uses sys.maxint (Python 2 only) to
        detect an open-ended slice; negative indices count from the end.
        """
        db = self.db
        if start < 0:
            pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
        else:
            pos0 = start + 1

        if stop < 0:
            length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
        elif stop == sys.maxint:
            length = self.len()
        else:
            length = '(%s - %s)' % (stop + 1, pos0)
        return Expression(db,db._adapter.SUBSTRING,
                          self, (pos0, length), self.type)

    def __getitem__(self, i):
        # a single character is a slice of length one
        return self[i:i + 1]

    def __str__(self):
        # delegate SQL generation to the adapter
        return self.db._adapter.expand(self,self.type)

    def __or__(self, other): # for use in sortby
        db = self.db
        return Expression(db,db._adapter.COMMA,self,other,self.type)

    def __invert__(self):
        db = self.db
        # NOTE(review): this tests attribute '_op' but __init__ sets 'op',
        # so the double-invert shortcut below appears unreachable — confirm
        if hasattr(self,'_op') and self.op == db._adapter.INVERT:
            return self.first
        return Expression(db,db._adapter.INVERT,self,type=self.type)

    # --- arithmetic ---------------------------------------------------

    def __add__(self, other):
        db = self.db
        return Expression(db,db._adapter.ADD,self,other,self.type)

    def __sub__(self, other):
        db = self.db
        # result type depends on operand type (dates subtract to doubles)
        if self.type in ('integer','bigint'):
            result_type = 'integer'
        elif self.type in ['date','time','datetime','double','float']:
            result_type = 'double'
        elif self.type.startswith('decimal('):
            result_type = self.type
        else:
            raise SyntaxError("subtraction operation not supported for type")
        return Expression(db,db._adapter.SUB,self,other,result_type)

    def __mul__(self, other):
        db = self.db
        return Expression(db,db._adapter.MUL,self,other,self.type)

    def __div__(self, other):
        db = self.db
        return Expression(db,db._adapter.DIV,self,other,self.type)

    def __mod__(self, other):
        db = self.db
        return Expression(db,db._adapter.MOD,self,other,self.type)

    # --- comparisons: these build Query objects, not Expressions ------

    def __eq__(self, value):
        db = self.db
        return Query(db, db._adapter.EQ, self, value)

    def __ne__(self, value):
        db = self.db
        return Query(db, db._adapter.NE, self, value)

    def __lt__(self, value):
        db = self.db
        return Query(db, db._adapter.LT, self, value)

    def __le__(self, value):
        db = self.db
        return Query(db, db._adapter.LE, self, value)

    def __gt__(self, value):
        db = self.db
        return Query(db, db._adapter.GT, self, value)

    def __ge__(self, value):
        db = self.db
        return Query(db, db._adapter.GE, self, value)

    def like(self, value, case_sensitive=False):
        db = self.db
        op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
        return Query(db, op, self, value)

    def regexp(self, value):
        db = self.db
        return Query(db, db._adapter.REGEXP, self, value)

    def belongs(self, *value):
        """
        Accepts the following inputs:
           field.belongs(1,2)
           field.belongs((1,2))
           field.belongs(query)

        Does NOT accept:
           field.belongs(1)
        """
        db = self.db
        if len(value) == 1:
            value = value[0]
        if isinstance(value,Query):
            # nested select on the queried table's id
            value = db(value)._select(value.first._table._id)
        return Query(db, db._adapter.BELONGS, self, value)

    def startswith(self, value):
        db = self.db
        if not self.type in ('string', 'text', 'json'):
            raise SyntaxError("startswith used with incompatible field type")
        return Query(db, db._adapter.STARTSWITH, self, value)

    def endswith(self, value):
        db = self.db
        if not self.type in ('string', 'text', 'json'):
            raise SyntaxError("endswith used with incompatible field type")
        return Query(db, db._adapter.ENDSWITH, self, value)

    def contains(self, value, all=False, case_sensitive=False):
        """
        The case_sensitive parameters is only useful for PostgreSQL
        For other RDMBs it is ignored and contains is always case in-sensitive
        For MongoDB and GAE contains is always case sensitive
        """
        db = self.db
        if isinstance(value,(list, tuple)):
            # AND/OR together one contains() per non-empty element
            subqueries = [self.contains(str(v).strip(),case_sensitive=case_sensitive)
                          for v in value if str(v).strip()]
            if not subqueries:
                return self.contains('')
            else:
                return reduce(all and AND or OR,subqueries)
        if not self.type in ('string', 'text', 'json') and not self.type.startswith('list:'):
            raise SyntaxError("contains used with incompatible field type")
        return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)

    def with_alias(self, alias):
        db = self.db
        return Expression(db, db._adapter.AS, self, alias, self.type)

    # GIS expressions

    def st_asgeojson(self, precision=15, options=0, version=1):
        return Expression(self.db, self.db._adapter.ST_ASGEOJSON, self,
                          dict(precision=precision, options=options,
                               version=version), 'string')

    def st_astext(self):
        db = self.db
        return Expression(db, db._adapter.ST_ASTEXT, self, type='string')

    def st_x(self):
        db = self.db
        return Expression(db, db._adapter.ST_X, self, type='string')

    def st_y(self):
        db = self.db
        return Expression(db, db._adapter.ST_Y, self, type='string')

    def st_distance(self, other):
        db = self.db
        return Expression(db,db._adapter.ST_DISTANCE,self,other, 'double')

    def st_simplify(self, value):
        db = self.db
        return Expression(db, db._adapter.ST_SIMPLIFY, self, value, self.type)

    # GIS queries

    def st_contains(self, value):
        db = self.db
        return Query(db, db._adapter.ST_CONTAINS, self, value)

    def st_equals(self, value):
        db = self.db
        return Query(db, db._adapter.ST_EQUALS, self, value)

    def st_intersects(self, value):
        db = self.db
        return Query(db, db._adapter.ST_INTERSECTS, self, value)

    def st_overlaps(self, value):
        db = self.db
        return Query(db, db._adapter.ST_OVERLAPS, self, value)

    def st_touches(self, value):
        db = self.db
        return Query(db, db._adapter.ST_TOUCHES, self, value)

    def st_within(self, value):
        db = self.db
        return Query(db, db._adapter.ST_WITHIN, self, value)
8999
# for use in both Query and sortby


class SQLCustomType(object):
    """
    allows defining of custom SQL types

    Example::

        decimal = SQLCustomType(
            type ='double',
            native ='integer',
            encoder =(lambda x: int(float(x) * 100)),
            decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
            )

        db.define_table(
            'example',
            Field('value', type=decimal)
            )

    :param type: the web2py type (default = 'string')
    :param native: the backend type
    :param encoder: how to encode the value to store it in the backend
    :param decoder: how to decode the value retrieved from the backend
    :param validator: what validators to use ( default = None, will use the
        default validator for type)
    """

    def __init__(
        self,
        type='string',
        native=None,
        encoder=None,
        decoder=None,
        validator=None,
        _class=None,
        ):

        self.type = type
        self.native = native
        # encoder/decoder default to identity when not supplied
        self.encoder = encoder or (lambda x: x)
        self.decoder = decoder or (lambda x: x)
        self.validator = validator
        self._class = _class or type

    def startswith(self, text=None):
        # NOTE(review): self.type is normally a str, so passing `self` as the
        # first argument makes str.startswith raise TypeError and this
        # returns False for any input — confirm whether `self` was intended
        try:
            return self.type.startswith(self, text)
        except TypeError:
            return False

    def __getslice__(self, a=0, b=100):
        # custom types are opaque: slicing is not supported
        return None

    def __getitem__(self, i):
        # custom types are opaque: indexing is not supported
        return None

    def __str__(self):
        return self._class
9060
class FieldVirtual(object):
    """A computed, read-only virtual field: f(row) evaluated at select time."""

    def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
        # for backward compatibility: old API passed the function as the
        # first positional argument with no name
        # NOTE(review): the fallback name 'unkown' is a long-standing typo
        # kept as-is because it is a runtime value
        (self.name, self.f) = (name, f) if f else ('unkown', name)
        self.type = ftype
        self.label = label or self.name.capitalize().replace('_',' ')
        self.represent = IDENTITY
        self.formatter = IDENTITY
        self.comment = None
        self.readable = True
        self.writable = False
        self.requires = None
        self.widget = None
        self.tablename = table_name
        self.filter_out = None
9076
class FieldMethod(object):
    """A method-like lazy field: f is evaluated on demand via `handler`."""

    def __init__(self, name, f=None, handler=None):
        # for backward compatibility: old API passed the function as the
        # first positional argument with no name ('unkown' typo kept as-is)
        (self.name, self.f) = (name, f) if f else ('unkown', name)
        self.handler = handler
9082
def list_represent(x,r=None):
    """Default representation for list: fields — comma-separated string."""
    values = x or []
    return ', '.join(str(item) for item in values)
9085
9086 -class Field(Expression):
9087 9088 Virtual = FieldVirtual 9089 Method = FieldMethod 9090 Lazy = FieldMethod # for backward compatibility 9091 9092 """ 9093 an instance of this class represents a database field 9094 9095 example:: 9096 9097 a = Field(name, 'string', length=32, default=None, required=False, 9098 requires=IS_NOT_EMPTY(), ondelete='CASCADE', 9099 notnull=False, unique=False, 9100 uploadfield=True, widget=None, label=None, comment=None, 9101 uploadfield=True, # True means store on disk, 9102 # 'a_field_name' means store in this field in db 9103 # False means file content will be discarded. 9104 writable=True, readable=True, update=None, authorize=None, 9105 autodelete=False, represent=None, uploadfolder=None, 9106 uploadseparate=False # upload to separate directories by uuid_keys 9107 # first 2 character and tablename.fieldname 9108 # False - old behavior 9109 # True - put uploaded file in 9110 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2] 9111 # directory) 9112 uploadfs=None # a pyfilesystem where to store upload 9113 9114 to be used as argument of DAL.define_table 9115 9116 allowed field types: 9117 string, boolean, integer, double, text, blob, 9118 date, time, datetime, upload, password 9119 9120 strings must have a length of Adapter.maxcharlength by default (512 or 255 for mysql) 9121 fields should have a default or they will be required in SQLFORMs 9122 the requires argument is used to validate the field input in SQLFORMs 9123 9124 """ 9125
    def __init__(
        self,
        fieldname,
        type='string',
        length=None,
        default=DEFAULT,
        required=False,
        requires=DEFAULT,
        ondelete='CASCADE',
        notnull=False,
        unique=False,
        uploadfield=True,
        widget=None,
        label=None,
        comment=None,
        writable=True,
        readable=True,
        update=None,
        authorize=None,
        autodelete=False,
        represent=None,
        uploadfolder=None,
        uploadseparate=False,
        uploadfs=None,
        compute=None,
        custom_store=None,
        custom_retrieve=None,
        custom_retrieve_file_properties=None,
        custom_delete=None,
        filter_in = None,
        filter_out = None,
        custom_qualifier = None,
        map_none = None,
        ):
        """Define a database field; see the class docstring for the full
        description of the keyword arguments.
        """
        self._db = self.db = None # both for backward compatibility
        self.op = None
        self.first = None
        self.second = None
        self.name = fieldname = cleanup(fieldname)
        # reject names that clash with Table attributes or Python keywords
        if not isinstance(fieldname,str) or hasattr(Table,fieldname) or \
                fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
            raise SyntaxError('Field: invalid field name: %s' % fieldname)
        # passing a Table/Field as type means 'reference <that table>'
        self.type = type if not isinstance(type, (Table,Field)) else 'reference %s' % type
        self.length = length if not length is None else DEFAULTLENGTH.get(self.type,512)
        # when no default is given, fall back to the update value (or None)
        self.default = default if default!=DEFAULT else (update or None)
        self.required = required # is this field required
        self.ondelete = ondelete.upper() # this is for reference fields only
        self.notnull = notnull
        self.unique = unique
        self.uploadfield = uploadfield
        self.uploadfolder = uploadfolder
        self.uploadseparate = uploadseparate
        self.uploadfs = uploadfs
        self.widget = widget
        self.comment = comment
        self.writable = writable
        self.readable = readable
        self.update = update
        self.authorize = authorize
        self.autodelete = autodelete
        # list: types get a comma-joined default representation
        self.represent = list_represent if \
            represent==None and type in ('list:integer','list:string') else represent
        self.compute = compute
        self.isattachment = True
        self.custom_store = custom_store
        self.custom_retrieve = custom_retrieve
        self.custom_retrieve_file_properties = custom_retrieve_file_properties
        self.custom_delete = custom_delete
        self.filter_in = filter_in
        self.filter_out = filter_out
        self.custom_qualifier = custom_qualifier
        self.label = label if label!=None else fieldname.replace('_',' ').title()
        self.requires = requires if requires!=None else []
        self.map_none = map_none
9200
    def set_attributes(self,*args,**attributes):
        """Bulk-update this field's attributes (thin __dict__.update wrapper)."""
        self.__dict__.update(*args,**attributes)
9203
9204 - def clone(self,point_self_references_to=False,**args):
9205 field = copy.copy(self) 9206 if point_self_references_to and \ 9207 field.type == 'reference %s'+field._tablename: 9208 field.type = 'reference %s' % point_self_references_to 9209 field.__dict__.update(args) 9210 return field
9211
    def store(self, file, filename=None, path=None):
        """Persist an uploaded file and return the generated storage name.

        Storage target depends on configuration: custom_store callback,
        a blob Field (uploadfield is a Field), a pyfilesystem (uploadfs),
        or the filesystem (uploadfolder / the db's ../uploads folder).
        The generated name encodes table, field, a uuid key and the
        base16-encoded original filename.
        """
        if self.custom_store:
            return self.custom_store(file,filename,path)
        if isinstance(file, cgi.FieldStorage):
            filename = filename or file.filename
            file = file.file
        elif not filename:
            filename = file.name
        # normalize path separators and strip any directory component
        filename = os.path.basename(filename.replace('/', os.sep)\
                                        .replace('\\', os.sep))
        m = REGEX_STORE_PATTERN.search(filename)
        extension = m and m.group('e') or 'txt'
        uuid_key = web2py_uuid().replace('-', '')[-16:]
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        # truncate so name+extension fits within the field length
        newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
        self_uploadfield = self.uploadfield
        if isinstance(self_uploadfield,Field):
            # store the content in another table's blob field
            blob_uploadfield_name = self_uploadfield.uploadfield
            keys={self_uploadfield.name: newfilename,
                  blob_uploadfield_name: file.read()}
            self_uploadfield.table.insert(**keys)
        elif self_uploadfield == True:
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError(
                    "you must specify a Field(...,uploadfolder=...)")
            if self.uploadseparate:
                if self.uploadfs:
                    raise RuntimeError("not supported")
                # shard uploads into per-table subfolders keyed by uuid prefix
                path = pjoin(path,"%s.%s" %(self._tablename, self.name),
                             uuid_key[:2])
            if not exists(path):
                os.makedirs(path)
            pathfilename = pjoin(path, newfilename)
            if self.uploadfs:
                dest_file = self.uploadfs.open(newfilename, 'wb')
            else:
                dest_file = open(pathfilename, 'wb')
            try:
                shutil.copyfileobj(file, dest_file)
            except IOError:
                raise IOError(
                    'Unable to store file "%s" because invalid permissions, readonly file system, or filename too long' % pathfilename)
            dest_file.close()
        return newfilename
9264
    def retrieve(self, name, path=None, nameonly=False):
        """
        Retrieve a previously stored upload.

        Returns (original_filename, stream); when nameonly==True and the
        file lives on the regular filesystem, returns
        (original_filename, full_path) instead so the caller can stream it.

        :param name: the encoded filename produced by store()
        :param path: optional folder override
        :raises HTTP 404/403: when authorization is configured and fails
        :raises TypeError: when name does not match the upload pattern
        """
        self_uploadfield = self.uploadfield
        if self.custom_retrieve:
            # user-supplied retrieval hook takes full control
            return self.custom_retrieve(name, path)
        import http
        if self.authorize or isinstance(self_uploadfield, str):
            # need the DB row either to authorize or to read the blob column
            row = self.db(self == name).select().first()
            if not row:
                raise http.HTTP(404)
            if self.authorize and not self.authorize(row):
                raise http.HTTP(403)
        m = REGEX_UPLOAD_PATTERN.match(name)
        if not m or not self.isattachment:
            raise TypeError('Can\'t retrieve %s' % name)
        file_properties = self.retrieve_file_properties(name, path)
        filename = file_properties['filename']
        if isinstance(self_uploadfield, str):  # ## if file is in DB
            stream = StringIO.StringIO(row[self_uploadfield] or '')
        elif isinstance(self_uploadfield, Field):
            # content stored in a blob field of another table
            blob_uploadfield_name = self_uploadfield.uploadfield
            query = self_uploadfield == name
            data = self_uploadfield.table(query)[blob_uploadfield_name]
            stream = StringIO.StringIO(data)
        elif self.uploadfs:
            # ## if file is on pyfilesystem
            stream = self.uploadfs.open(name, 'rb')
        else:
            # ## if file is on regular filesystem
            # this is intentionally a string with the filename, not a stream:
            # it propagates and allows stream_file_or_304_or_206 to be called
            fullname = pjoin(file_properties['path'], name)
            if nameonly:
                return (filename, fullname)
            stream = open(fullname, 'rb')
        return (filename, stream)
9304
    def retrieve_file_properties(self, name, path=None):
        """
        Decode a stored upload name back to
        ``dict(path=<folder or None>, filename=<original name>)``.

        path is None when the content lives in the database (uploadfield
        is a str or a Field); otherwise it is the filesystem folder.
        """
        self_uploadfield = self.uploadfield
        if self.custom_retrieve_file_properties:
            return self.custom_retrieve_file_properties(name, path)
        try:
            m = REGEX_UPLOAD_PATTERN.match(name)
            if not m or not self.isattachment:
                raise TypeError('Can\'t retrieve %s file properties' % name)
            # the original filename was base16-encoded by store()
            filename = base64.b16decode(m.group('name'), True)
            filename = REGEX_CLEANUP_FN.sub('_', filename)
        except (TypeError, AttributeError):
            # name does not follow the encoded pattern: use it verbatim
            filename = name
        if isinstance(self_uploadfield, str):  # ## if file is in DB
            return dict(path=None, filename=filename)
        elif isinstance(self_uploadfield, Field):
            return dict(path=None, filename=filename)
        else:
            # ## if file is on filesystem
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            else:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            if self.uploadseparate:
                # NOTE(review): if the regex did not match above, m is None
                # (or unbound) here and these calls raise -- presumably
                # uploadseparate names always match; confirm.
                t = m.group('table')
                f = m.group('field')
                u = m.group('uuidkey')
                path = pjoin(path, "%s.%s" % (t, f), u[:2])
            return dict(path=path, filename=filename)
9335 9336
9337 - def formatter(self, value):
9338 requires = self.requires 9339 if value is None or not requires: 9340 return value or self.map_none 9341 if not isinstance(requires, (list, tuple)): 9342 requires = [requires] 9343 elif isinstance(requires, tuple): 9344 requires = list(requires) 9345 else: 9346 requires = copy.copy(requires) 9347 requires.reverse() 9348 for item in requires: 9349 if hasattr(item, 'formatter'): 9350 value = item.formatter(value) 9351 return value
9352
9353 - def validate(self, value):
9354 if not self.requires or self.requires == DEFAULT: 9355 return ((value if value!=self.map_none else None), None) 9356 requires = self.requires 9357 if not isinstance(requires, (list, tuple)): 9358 requires = [requires] 9359 for validator in requires: 9360 (value, error) = validator(value) 9361 if error: 9362 return (value, error) 9363 return ((value if value!=self.map_none else None), None)
9364
9365 - def count(self, distinct=None):
9366 return Expression(self.db, self.db._adapter.COUNT, self, distinct, 'integer')
9367
    def as_dict(self, flat=False, sanitize=True, options=True):
        """
        Return a dict description of this field (attributes + validators).

        :param flat: stringify non-serializable objects recursively
        :param sanitize: drop validators whose type name mentions
            CRYPT/IS_STRONG (avoids leaking secrets)
        :param options: include validator option sets (labels/theset)
        """

        # attributes copied (flattened) into the result dict
        attrs = ('type', 'length', 'default', 'required',
                 'ondelete', 'notnull', 'unique', 'uploadfield',
                 'widget', 'label', 'comment', 'writable', 'readable',
                 'update', 'authorize', 'autodelete', 'represent',
                 'uploadfolder', 'uploadseparate', 'uploadfs',
                 'compute', 'custom_store', 'custom_retrieve',
                 'custom_retrieve_file_properties', 'custom_delete',
                 'filter_in', 'filter_out', 'custom_qualifier',
                 'map_none', 'name')

        SERIALIZABLE_TYPES = (int, long, basestring, dict, list,
                              float, tuple, bool, type(None))

        def flatten(obj):
            # recursively convert obj into something json/xml-safe
            if flat:
                if isinstance(obj, flatten.__class__):
                    # a function: render its type name
                    return str(type(obj))
                elif isinstance(obj, type):
                    # a class: render "module.Name" extracted from repr
                    try:
                        return str(obj).split("'")[1]
                    except IndexError:
                        return str(obj)
                elif not isinstance(obj, SERIALIZABLE_TYPES):
                    return str(obj)
                elif isinstance(obj, dict):
                    newobj = dict()
                    for k, v in obj.items():
                        newobj[k] = flatten(v)
                    return newobj
                elif isinstance(obj, (list, tuple, set)):
                    return [flatten(v) for v in obj]
                else:
                    return obj
            elif isinstance(obj, (dict, set)):
                # non-flat: shallow-copy containers so callers can't
                # mutate the field's own structures
                return obj.copy()
            else:
                return obj

        def filter_requires(t, r, options=True):
            # describe one validator r (of type t) as a dict, or None
            # when sanitizing secret-bearing validators
            if sanitize and any([keyword in str(t).upper() for
                                 keyword in ("CRYPT", "IS_STRONG")]):
                return None

            if not isinstance(r, dict):
                if options and hasattr(r, "options"):
                    if callable(r.options):
                        # called for its side effect: populates the
                        # validator's option attributes (labels/theset)
                        r.options()
                newr = r.__dict__.copy()
            else:
                newr = r.copy()

            # remove options if not required
            if not options and newr.has_key("labels"):
                [newr.update({key: None}) for key in
                 ("labels", "theset") if (key in newr)]

            for k, v in newr.items():
                if k == "other":
                    # nested validator (e.g. IS_EMPTY_OR): recurse
                    if isinstance(v, dict):
                        otype, other = v.popitem()
                    else:
                        otype = flatten(type(v))
                        other = v
                    newr[k] = {otype: filter_requires(otype, other,
                                                      options=options)}
                else:
                    newr[k] = flatten(v)
            return newr

        if isinstance(self.requires, (tuple, list, set)):
            requires = dict([(flatten(type(r)),
                              filter_requires(type(r), r,
                                              options=options)) for
                             r in self.requires])
        else:
            requires = {flatten(type(self.requires)):
                        filter_requires(type(self.requires),
                                        self.requires, options=options)}

        d = dict(colname="%s.%s" % (self.tablename, self.name),
                 requires=requires)
        d.update([(attr, flatten(getattr(self, attr))) for attr in attrs])
        return d
9453 - def as_xml(self, sanitize=True, options=True):
9454 if have_serializers: 9455 xml = serializers.xml 9456 else: 9457 raise ImportError("No xml serializers available") 9458 d = self.as_dict(flat=True, sanitize=sanitize, 9459 options=options) 9460 return xml(d)
9461
9462 - def as_json(self, sanitize=True, options=True):
9463 if have_serializers: 9464 json = serializers.json 9465 else: 9466 raise ImportError("No json serializers available") 9467 d = self.as_dict(flat=True, sanitize=sanitize, 9468 options=options) 9469 return json(d)
9470
9471 - def as_yaml(self, sanitize=True, options=True):
9472 if have_serializers: 9473 d = self.as_dict(flat=True, sanitize=sanitize, 9474 options=options) 9475 return serializers.yaml(d) 9476 else: 9477 raise ImportError("No YAML serializers available")
9478
9479 - def __nonzero__(self):
9480 return True
9481
9482 - def __str__(self):
9483 try: 9484 return '%s.%s' % (self.tablename, self.name) 9485 except: 9486 return '<no table>.%s' % self.name
9487
class Query(object):

    """
    a query object necessary to define a set.
    it can be stored or can be passed to DAL.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()

    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        ignore_common_filters = False,
        **optional_args
        ):
        # db: owning DAL; op: adapter operator (callable or name)
        # first/second: operands (Field/Expression/Query/constant)
        self.db = self._db = db
        self.op = op
        self.first = first
        self.second = second
        self.ignore_common_filters = ignore_common_filters
        self.optional_args = optional_args

    def __repr__(self):
        # expand through BaseAdapter explicitly so repr works uniformly
        return '<Query %s>' % BaseAdapter.expand(self.db._adapter,self)

    def __str__(self):
        # adapter-specific (SQL) text of this query
        return self.db._adapter.expand(self)

    def __and__(self, other):
        # query & query -> AND query
        return Query(self.db,self.db._adapter.AND,self,other)

    def __or__(self, other):
        # query | query -> OR query
        return Query(self.db,self.db._adapter.OR,self,other)

    def __invert__(self):
        # ~(~query) collapses back to the original query
        if self.op==self.db._adapter.NOT:
            return self.first
        return Query(self.db,self.db._adapter.NOT,self)

    def __eq__(self, other):
        # structural equality via the expanded representation
        # NOTE(review): __eq__ without __hash__ keeps the default id-based
        # hash in Python 2; under Python 3 this would make Query unhashable
        return repr(self) == repr(other)

    def __ne__(self, other):
        return not (self == other)

    def case(self,t=1,f=0):
        # SQL CASE WHEN <self> THEN t ELSE f END
        return self.db._adapter.CASE(self,t,f)

    def as_dict(self, flat=False, sanitize=True):
        """Experimental stuff

        This allows to return a plain dictionary with the basic
        query representation. Can be used with json/xml services
        for client-side db I/O

        Example:
        >>> q = db.auth_user.id != 0
        >>> q.as_dict(flat=True)
        {"op": "NE", "first":{"tablename": "auth_user",
                              "fieldname": "id"},
                     "second":0}
        """

        SERIALIZABLE_TYPES = (tuple, dict, list, int, long, float,
                              basestring, type(None), bool)

        def loop(d):
            # recursively convert Query/Expression/Field operands into
            # plain serializable dicts; unknown values are dropped
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v)
                elif k == "op":
                    if callable(v):
                        # adapter method: keep only its name
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                    else: pass # not callable or string
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else: newd[k] = v
            return newd

        if flat:
            return loop(self.__dict__)
        else: return self.__dict__

    def as_xml(self, sanitize=True):
        # serialize the flat dict form to XML (requires gluon serializers)
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return xml(d)

    def as_json(self, sanitize=True):
        # serialize the flat dict form to JSON (requires gluon serializers)
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return json(d)
9611
def xorify(orderby):
    """Fold a sequence of orderby terms into one with `|`; None if empty."""
    if not orderby:
        return None
    combined = orderby[0]
    for term in orderby[1:]:
        combined = combined | term
    return combined
9619
def use_common_filters(query):
    """True when query exists, carries ignore_common_filters, and that
    flag is falsy (i.e. the common filters should be applied)."""
    if not query:
        return query  # preserve falsy pass-through (None, 0, ...)
    if not hasattr(query, 'ignore_common_filters'):
        return False
    return not query.ignore_common_filters
9623
class Set(object):

    """
    a Set represents a set of records in the database,
    the records are identified by the query=Query(...) object.
    normally the Set is generated by DAL.__call__(Query(...))

    given a set, for example
    set = db(db.users.name=='Max')
    you can:
    set.update(db.users.name='Massimo')
    set.delete() # all elements in the set
    set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
    and take subsets:
    subset = set(db.users.id<5)
    """

    def __init__(self, db, query, ignore_common_filters = None):
        # db: owning DAL; query: Query/Expression or a dict (parsed below)
        self.db = db
        self._db = db # for backward compatibility
        self.dquery = None

        # if query is a dict, parse it
        if isinstance(query, dict):
            query = self.parse(query)

        # force the requested common-filters behavior onto a *copy* of the
        # query so the caller's query object is not mutated
        if not ignore_common_filters is None and \
                use_common_filters(query) == ignore_common_filters:
            query = copy.copy(query)
            query.ignore_common_filters = ignore_common_filters
        self.query = query

    def __repr__(self):
        return '<Set %s>' % BaseAdapter.expand(self.db._adapter,self.query)

    def __call__(self, query, ignore_common_filters=False):
        # narrow this Set with an additional condition; accepts a Table
        # (id query), a raw string, a Field (NOT NULL) or a Query
        if isinstance(query,Table):
            query = self.db._adapter.id_query(query)
        elif isinstance(query,str):
            query = Expression(self.db,query)
        elif isinstance(query,Field):
            query = query!=None
        if self.query:
            return Set(self.db, self.query & query,
                       ignore_common_filters=ignore_common_filters)
        else:
            return Set(self.db, query,
                       ignore_common_filters=ignore_common_filters)

    def _count(self,distinct=None):
        # SQL text of the COUNT, without executing it
        return self.db._adapter._count(self.query,distinct)

    def _select(self, *fields, **attributes):
        # SQL text of the SELECT, without executing it
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join',None),
                                    attributes.get('left',None),
                                    attributes.get('orderby',None),
                                    attributes.get('groupby',None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter._select(self.query,fields,attributes)

    def _delete(self):
        # SQL text of the DELETE, without executing it
        db = self.db
        tablename = db._adapter.get_table(self.query)
        return db._adapter._delete(tablename,self.query)

    def _update(self, **update_fields):
        # SQL text of the UPDATE, without executing it
        db = self.db
        tablename = db._adapter.get_table(self.query)
        fields = db[tablename]._listify(update_fields,update=True)
        return db._adapter._update(tablename,self.query,fields)

    def as_dict(self, flat=False, sanitize=True):
        # dict form of this Set; with sanitize=False DB identity is included
        if flat:
            uid = dbname = uri = None
            codec = self.db._db_codec
            if not sanitize:
                # NOTE(review): uri receives _dbname and name receives
                # str(db) -- possibly swapped assignments; confirm intent
                uri, dbname, uid = (self.db._dbname, str(self.db),
                                    self.db._db_uid)
            d = {"query": self.query.as_dict(flat=flat)}
            d["db"] = {"uid": uid, "codec": codec,
                       "name": dbname, "uri": uri}
            return d
        else: return self.__dict__

    def as_xml(self, sanitize=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return xml(d)

    def as_json(self, sanitize=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return json(d)

    def parse(self, dquery):
        "Experimental: Turn a dictionary into a Query object"
        self.dquery = dquery
        return self.build(self.dquery)

    def build(self, d):
        "Experimental: see .parse()"
        op, first, second = (d["op"], d["first"],
                             d.get("second", None))
        left = right = built = None

        if op in ("AND", "OR"):
            if not (type(first), type(second)) == (dict, dict):
                raise SyntaxError("Invalid AND/OR query")
            if op == "AND":
                built = self.build(first) & self.build(second)
            else: built = self.build(first) | self.build(second)

        elif op == "NOT":
            if first is None:
                raise SyntaxError("Invalid NOT query")
            built = ~self.build(first)
        else:
            # normal operation (GT, EQ, LT, ...)
            for k, v in {"left": first, "right": second}.items():
                if isinstance(v, dict) and v.get("op"):
                    # nested sub-query dict: build it recursively
                    v = self.build(v)
                if isinstance(v, dict) and ("tablename" in v):
                    # field reference dict -> actual Field object
                    v = self.db[v["tablename"]][v["fieldname"]]
                if k == "left": left = v
                else: right = v

            if hasattr(self.db._adapter, op):
                opm = getattr(self.db._adapter, op)

            if op == "EQ": built = left == right
            elif op == "NE": built = left != right
            elif op == "GT": built = left > right
            elif op == "GE": built = left >= right
            elif op == "LT": built = left < right
            elif op == "LE": built = left <= right
            elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
                built = Expression(self.db, opm)
            elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
                        "COALESCE_ZERO", "RAW", "INVERT"):
                built = Expression(self.db, opm, left)
            elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
                        "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
                        "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
                        "MOD", "AS", "ON", "COMMA", "NOT_NULL",
                        "COALESCE", "CONTAINS", "BELONGS"):
                built = Expression(self.db, opm, left, right)
            # expression as string
            elif not (left or right): built = Expression(self.db, op)
            else:
                raise SyntaxError("Operator not supported: %s" % op)

        return built

    def isempty(self):
        # cheap emptiness check: fetch at most one record
        return not self.select(limitby=(0,1))

    def count(self,distinct=None, cache=None):
        # execute COUNT; with cache=(cache_model, time_expire) the result
        # is memoized under a key derived from the SQL text
        db = self.db
        if cache:
            cache_model, time_expire = cache
            sql = self._count(distinct=distinct)
            key = db._uri + '/' + sql
            # keep cache keys short and filesystem-safe
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self,distinct=distinct: \
                    db._adapter.count(self.query,distinct)),
                time_expire)
        return db._adapter.count(self.query,distinct)

    def select(self, *fields, **attributes):
        # execute the SELECT and return a Rows object
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join',None),
                                    attributes.get('left',None),
                                    attributes.get('orderby',None),
                                    attributes.get('groupby',None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter.select(self.query,fields,attributes)

    def nested_select(self,*fields,**attributes):
        # wrap the SELECT's SQL so it can be embedded in another query
        return Expression(self.db,self._select(*fields,**attributes))

    def delete(self):
        # execute DELETE, honoring _before_delete/_after_delete callbacks;
        # returns 0 when a before-callback vetoes the operation
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        if any(f(self) for f in table._before_delete): return 0
        ret = db._adapter.delete(tablename,self.query)
        ret and [f(self) for f in table._after_delete]
        return ret

    def update(self, **update_fields):
        # execute UPDATE, honoring _before_update/_after_update callbacks;
        # returns 0 when a before-callback vetoes the operation
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        table._attempt_upload(update_fields)
        if any(f(self,update_fields) for f in table._before_update):
            return 0
        fields = table._listify(update_fields,update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = db._adapter.update(tablename,self.query,fields)
        ret and [f(self,update_fields) for f in table._after_update]
        return ret

    def update_naive(self, **update_fields):
        """
        same as update but does not call table._before_update and _after_update
        """
        tablename = self.db._adapter.get_table(self.query)
        table = self.db[tablename]
        fields = table._listify(update_fields,update=True)
        if not fields: raise SyntaxError("No fields to update")
        ret = self.db._adapter.update(tablename,self.query,fields)
        return ret

    def validate_and_update(self, **update_fields):
        # run each field's validators; only update when no errors.
        # returns a Row with .errors (per-field) and .updated (count/None)
        tablename = self.db._adapter.get_table(self.query)
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(update_fields)
        for key,value in update_fields.iteritems():
            value,error = self.db[tablename][key].validate(value)
            if error:
                response.errors[key] = error
            else:
                new_fields[key] = value
        table = self.db[tablename]
        if response.errors:
            response.updated = None
        else:
            if not any(f(self,new_fields) for f in table._before_update):
                fields = table._listify(new_fields,update=True)
                if not fields: raise SyntaxError("No fields to update")
                ret = self.db._adapter.update(tablename,self.query,fields)
                ret and [f(self,new_fields) for f in table._after_update]
            else:
                ret = 0
            response.updated = ret
        return response

    def delete_uploaded_files(self, upload_fields=None):
        # remove files referenced by autodelete upload fields of the records
        # in this set; skips files whose name is being kept (upload_fields)
        table = self.db[self.db._adapter.tables(self.query)[0]]
        # ## mind uploadfield==True means file is not in DB
        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                   and table[f].uploadfield == True
                   and table[f].autodelete]
        if not fields:
            return False
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    # new value equals old one: file is being kept
                    continue
                if field.custom_delete:
                    field.custom_delete(oldname)
                else:
                    uploadfolder = field.uploadfolder
                    if not uploadfolder:
                        uploadfolder = pjoin(
                            self.db._adapter.folder, '..', 'uploads')
                    if field.uploadseparate:
                        # sharded layout: <folder>/<table.field>/<uuid[:2]>
                        items = oldname.split('.')
                        uploadfolder = pjoin(
                            uploadfolder,
                            "%s.%s" % (items[0], items[1]),
                            items[2][:2])
                    oldpath = pjoin(uploadfolder, oldname)
                    if exists(oldpath):
                        os.unlink(oldpath)
        return False
9911
class RecordUpdater(object):
    """
    Callable attached to a retrieved row (row.update_record).

    Writes the given fields -- or the row's cached column set when none
    are given -- back to the database (bypassing common filters) and
    refreshes the cached column set.
    """

    def __init__(self, colset, table, id):
        self.colset = colset
        self.db = table._db
        self.tablename = table._tablename
        self.id = id

    def __call__(self, **fields):
        db = self.db
        table = db[self.tablename]
        newfields = fields or dict(self.colset)
        # drop columns the table does not know about, and the id column
        for fieldname in list(newfields.keys()):
            if not fieldname in table.fields or table[fieldname].type == 'id':
                del newfields[fieldname]
        table._db(table._id == self.id,
                  ignore_common_filters=True).update(**newfields)
        self.colset.update(newfields)
        return self.colset
9927
class RecordDeleter(object):
    """Callable attached to a retrieved row (row.delete_record):
    deletes the bound record by its id."""

    def __init__(self, table, id):
        self.db = table._db
        self.tablename = table._tablename
        self.id = id

    def __call__(self):
        query = self.db[self.tablename]._id == self.id
        return self.db(query).delete()
9933
class LazySet(object):
    """
    Deferred Set: remembers (table, field, id) and materializes the real
    Set(field == id) on demand for every operation, so reference fields
    can expose query-like behavior without building the Set eagerly.
    """

    def __init__(self, field, id):
        self.db = field.db
        self.tablename = field._tablename
        self.fieldname = field.name
        self.id = id

    def _getset(self):
        # build the concrete Set from the stored coordinates
        field = self.db[self.tablename][self.fieldname]
        return Set(self.db, field == self.id)

    def __repr__(self):
        return repr(self._getset())

    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)

    def _count(self, distinct=None):
        return self._getset()._count(distinct)

    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields, **attributes)

    def _delete(self):
        return self._getset()._delete()

    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)

    def isempty(self):
        return self._getset().isempty()

    def count(self, distinct=None, cache=None):
        return self._getset().count(distinct, cache)

    def select(self, *fields, **attributes):
        return self._getset().select(*fields, **attributes)

    def nested_select(self, *fields, **attributes):
        return self._getset().nested_select(*fields, **attributes)

    def delete(self):
        return self._getset().delete()

    def update(self, **update_fields):
        return self._getset().update(**update_fields)

    def update_naive(self, **update_fields):
        return self._getset().update_naive(**update_fields)

    def validate_and_update(self, **update_fields):
        return self._getset().validate_and_update(**update_fields)

    def delete_uploaded_files(self, upload_fields=None):
        return self._getset().delete_uploaded_files(upload_fields)
9971
class VirtualCommand(object):
    """Bind a lazy virtual-field method to a row, so it can be invoked
    later as a plain callable: the row is passed as first argument."""

    def __init__(self, method, row):
        self.method = method
        self.row = row

    def __call__(self, *args, **kwargs):
        return self.method(self.row, *args, **kwargs)
9978
def lazy_virtualfield(f):
    """Decorator: mark a virtual-field method as lazy, so it is wrapped
    in a VirtualCommand instead of being evaluated eagerly."""
    setattr(f, '__lazy__', True)
    return f
9982
9983 -class Rows(object):
9984 9985 """ 9986 A wrapper for the return value of a select. It basically represents a table. 9987 It has an iterator and each row is represented as a dictionary. 9988 """ 9989 9990 # ## TODO: this class still needs some work to care for ID/OID 9991
9992 - def __init__( 9993 self, 9994 db=None, 9995 records=[], 9996 colnames=[], 9997 compact=True, 9998 rawrows=None 9999 ):
10000 self.db = db 10001 self.records = records 10002 self.colnames = colnames 10003 self.compact = compact 10004 self.response = rawrows
10005
10006 - def __repr__(self):
10007 return '<Rows (%s)>' % len(self.records)
10008
10009 - def setvirtualfields(self,**keyed_virtualfields):
10010 """ 10011 db.define_table('x',Field('number','integer')) 10012 if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)] 10013 10014 from gluon.dal import lazy_virtualfield 10015 10016 class MyVirtualFields(object): 10017 # normal virtual field (backward compatible, discouraged) 10018 def normal_shift(self): return self.x.number+1 10019 # lazy virtual field (because of @staticmethod) 10020 @lazy_virtualfield 10021 def lazy_shift(instance,row,delta=4): return row.x.number+delta 10022 db.x.virtualfields.append(MyVirtualFields()) 10023 10024 for row in db(db.x).select(): 10025 print row.number, row.normal_shift, row.lazy_shift(delta=7) 10026 """ 10027 if not keyed_virtualfields: 10028 return self 10029 for row in self.records: 10030 for (tablename,virtualfields) in keyed_virtualfields.iteritems(): 10031 attributes = dir(virtualfields) 10032 if not tablename in row: 10033 box = row[tablename] = Row() 10034 else: 10035 box = row[tablename] 10036 updated = False 10037 for attribute in attributes: 10038 if attribute[0] != '_': 10039 method = getattr(virtualfields,attribute) 10040 if hasattr(method,'__lazy__'): 10041 box[attribute]=VirtualCommand(method,row) 10042 elif type(method)==types.MethodType: 10043 if not updated: 10044 virtualfields.__dict__.update(row) 10045 updated = True 10046 box[attribute]=method() 10047 return self
10048
10049 - def __and__(self,other):
10050 if self.colnames!=other.colnames: 10051 raise Exception('Cannot & incompatible Rows objects') 10052 records = self.records+other.records 10053 return Rows(self.db,records,self.colnames)
10054
10055 - def __or__(self,other):
10056 if self.colnames!=other.colnames: 10057 raise Exception('Cannot | incompatible Rows objects') 10058 records = self.records 10059 records += [record for record in other.records \ 10060 if not record in records] 10061 return Rows(self.db,records,self.colnames)
10062
10063 - def __nonzero__(self):
10064 if len(self.records): 10065 return 1 10066 return 0
10067
10068 - def __len__(self):
10069 return len(self.records)
10070
10071 - def __getslice__(self, a, b):
10072 return Rows(self.db,self.records[a:b],self.colnames)
10073
10074 - def __getitem__(self, i):
10075 row = self.records[i] 10076 keys = row.keys() 10077 if self.compact and len(keys) == 1 and keys[0] != '_extra': 10078 return row[row.keys()[0]] 10079 return row
10080
10081 - def __iter__(self):
10082 """ 10083 iterator over records 10084 """ 10085 10086 for i in xrange(len(self)): 10087 yield self[i]
10088
10089 - def __str__(self):
10090 """ 10091 serializes the table into a csv file 10092 """ 10093 10094 s = StringIO.StringIO() 10095 self.export_to_csv_file(s) 10096 return s.getvalue()
10097
10098 - def first(self):
10099 if not self.records: 10100 return None 10101 return self[0]
10102
10103 - def last(self):
10104 if not self.records: 10105 return None 10106 return self[-1]
10107
10108 - def find(self,f,limitby=None):
10109 """ 10110 returns a new Rows object, a subset of the original object, 10111 filtered by the function f 10112 """ 10113 if not self: 10114 return Rows(self.db, [], self.colnames) 10115 records = [] 10116 if limitby: 10117 a,b = limitby 10118 else: 10119 a,b = 0,len(self) 10120 k = 0 10121 for row in self: 10122 if f(row): 10123 if a<=k: records.append(row) 10124 k += 1 10125 if k==b: break 10126 return Rows(self.db, records, self.colnames)
10127
10128 - def exclude(self, f):
10129 """ 10130 removes elements from the calling Rows object, filtered by the function f, 10131 and returns a new Rows object containing the removed elements 10132 """ 10133 if not self.records: 10134 return Rows(self.db, [], self.colnames) 10135 removed = [] 10136 i=0 10137 while i<len(self): 10138 row = self[i] 10139 if f(row): 10140 removed.append(self.records[i]) 10141 del self.records[i] 10142 else: 10143 i += 1 10144 return Rows(self.db, removed, self.colnames)
10145
10146 - def sort(self, f, reverse=False):
10147 """ 10148 returns a list of sorted elements (not sorted in place) 10149 """ 10150 rows = Rows(self.db,[],self.colnames,compact=False) 10151 rows.records = sorted(self,key=f,reverse=reverse) 10152 return rows
10153 10154
10155 - def group_by_value(self, field):
10156 """ 10157 regroups the rows, by one of the fields 10158 """ 10159 if not self.records: 10160 return {} 10161 key = str(field) 10162 grouped_row_group = dict() 10163 10164 for row in self: 10165 value = row[key] 10166 if not value in grouped_row_group: 10167 grouped_row_group[value] = [row] 10168 else: 10169 grouped_row_group[value].append(row) 10170 return grouped_row_group
10171
10172 - def as_list(self, 10173 compact=True, 10174 storage_to_dict=True, 10175 datetime_to_str=True, 10176 custom_types=None):
10177 """ 10178 returns the data as a list or dictionary. 10179 :param storage_to_dict: when True returns a dict, otherwise a list(default True) 10180 :param datetime_to_str: convert datetime fields as strings (default True) 10181 """ 10182 (oc, self.compact) = (self.compact, compact) 10183 if storage_to_dict: 10184 items = [item.as_dict(datetime_to_str, custom_types) for item in self] 10185 else: 10186 items = [item for item in self] 10187 self.compact = compact 10188 return items
10189 10190
10191 - def as_dict(self, 10192 key='id', 10193 compact=True, 10194 storage_to_dict=True, 10195 datetime_to_str=True, 10196 custom_types=None):
10197 """ 10198 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False) 10199 10200 :param key: the name of the field to be used as dict key, normally the id 10201 :param compact: ? (default True) 10202 :param storage_to_dict: when True returns a dict, otherwise a list(default True) 10203 :param datetime_to_str: convert datetime fields as strings (default True) 10204 """ 10205 10206 # test for multiple rows 10207 multi = False 10208 f = self.first() 10209 if f: 10210 multi = any([isinstance(v, f.__class__) for v in f.values()]) 10211 if (not "." in key) and multi: 10212 # No key provided, default to int indices 10213 def new_key(): 10214 i = 0 10215 while True: 10216 yield i 10217 i += 1
10218 key_generator = new_key() 10219 key = lambda r: key_generator.next() 10220 10221 rows = self.as_list(compact, storage_to_dict, datetime_to_str, custom_types) 10222 if isinstance(key,str) and key.count('.')==1: 10223 (table, field) = key.split('.') 10224 return dict([(r[table][field],r) for r in rows]) 10225 elif isinstance(key,str): 10226 return dict([(r[key],r) for r in rows]) 10227 else: 10228 return dict([(key(r),r) for r in rows])
10229
def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
    """
    Export the rows as CSV; the first line contains the column names.

    :param ofile: file-like object the CSV is written to
    :param null: how null values must be represented (default '<NULL>')
    :param delimiter: delimiter to separate values (default ',')
    :param quotechar: character to use to quote string values (default '"')
    :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
    :param represent: use the fields .represent value (default False)
    :param colnames: list of column names to use (default self.colnames)
        This will only work when exporting rows objects!!!!
        DO NOT use this with db.export_to_csv()
    """
    delimiter = kwargs.get('delimiter', ',')
    quotechar = kwargs.get('quotechar', '"')
    quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
    represent = kwargs.get('represent', False)
    writer = csv.writer(ofile, delimiter=delimiter,
                        quotechar=quotechar, quoting=quoting)
    colnames = kwargs.get('colnames', self.colnames)
    write_colnames = kwargs.get('write_colnames', True)
    # a proper csv starts with the column names as header row
    if write_colnames:
        writer.writerow(colnames)

    def none_exception(value):
        """
        Return a cleaned up value that can be used for csv export:
        - unicode text is encoded as utf8
        - None values are replaced with the given representation (default <NULL>)
        - Reference values collapse to their integer id
        - date/time-like values (anything with .isoformat) become
          'YYYY-MM-DD HH:MM:SS'
        - lists/tuples (type='list:..') are bar-encoded
        """
        if value is None:
            return null
        elif isinstance(value, unicode):
            return value.encode('utf8')
        elif isinstance(value, Reference):
            return int(value)
        elif hasattr(value, 'isoformat'):
            return value.isoformat()[:19].replace('T', ' ')
        elif isinstance(value, (list, tuple)):  # for type='list:..'
            return bar_encode(value)
        return value

    for record in self:
        row = []
        for col in colnames:
            if not REGEX_TABLE_DOT_FIELD.match(col):
                # not a 'table.field' name: must come from the extra columns
                row.append(record._extra[col])
            else:
                (t, f) = col.split('.')
                field = self.db[t][f]
                # compact rows store values flat; full rows nest them per table
                if isinstance(record.get(t, None), (Row, dict)):
                    value = record[t][f]
                else:
                    value = record[f]
                # idiom fix: 'value is not None' instead of 'not value is None'
                if field.type == 'blob' and value is not None:
                    value = base64.b64encode(value)
                elif represent and field.represent:
                    value = field.represent(value)
                row.append(none_exception(value))
        writer.writerow(row)
def xml(self, strict=False, row_name='row', rows_name='rows'):
    """
    Serialize the rows as XML.

    With strict=True each row is rendered via row.as_xml and the result is
    wrapped in a <rows_name>...</rows_name> element; otherwise the table is
    rendered using sqlhtml.SQLTABLE (if present).

    Fix: removed the unused local ``ncols = len(self.colnames)`` that was
    computed and never read.
    """
    if strict:
        return '<%s>\n%s\n</%s>' % (rows_name,
                                    '\n'.join(row.as_xml(row_name=row_name,
                                                         colnames=self.colnames) for
                                              row in self), rows_name)

    import sqlhtml
    return sqlhtml.SQLTABLE(self).xml()
10307
def as_xml(self, row_name='row', rows_name='rows'):
    """Strict XML serialization: shorthand for ``xml(strict=True, ...)``."""
    options = dict(strict=True, row_name=row_name, rows_name=rows_name)
    return self.xml(**options)
10310
def as_json(self, mode='object', default=None):
    """
    Serialize the rows to a JSON list of objects.
    """
    # build the python-level list first; actual serialization happens below
    items = [record.as_json(mode=mode,
                            default=default,
                            serialize=False,
                            colnames=self.colnames)
             for record in self]

    # prefer the gluon serializers when available, then simplejson
    if have_serializers:
        fallback = default or serializers.custom_json
        return serializers.json(items, default=fallback)
    if simplejson:
        return simplejson.dumps(items)
    raise RuntimeError("missing simplejson")
10329 10330 # for consistent naming yet backwards compatible 10331 as_csv = __str__ 10332 json = as_json 10333
################################################################################
# dummy function used to define some doctests
################################################################################

# NOTE: the body of this function is a single docstring of doctests, executed
# by doctest.testmod() in the __main__ block below.  The DAL under test is
# chosen via sys.argv[1] (defaulting to sqlite://test.db).  The examples ARE
# behavior: do not edit them except to change what is tested.
def test_all():
    """

    >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
              Field('stringf', 'string', length=32, required=True),\
              Field('booleanf', 'boolean', default=False),\
              Field('passwordf', 'password', notnull=True),\
              Field('uploadf', 'upload'),\
              Field('blobf', 'blob'),\
              Field('integerf', 'integer', unique=True),\
              Field('doublef', 'double', unique=True,notnull=True),\
              Field('jsonf', 'json'),\
              Field('datef', 'date', default=datetime.date.today()),\
              Field('timef', 'time'),\
              Field('datetimef', 'datetime'),\
              migrate='test_user.table')

   Insert a field

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                       uploadf=None, integerf=5, doublef=3.14,\
                       jsonf={"j": True},\
                       datef=datetime.date(2001, 1, 1),\
                       timef=datetime.time(12, 30, 15),\
                       datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
              Field('name'),\
              Field('birth','date'),\
              migrate='test_person.table')
    >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
    >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db.person[2].name
    'Massimo'
    >>> db.person(2).name
    'Massimo'
    >>> db.person(name='Massimo').name
    'Massimo'
    >>> db.person(db.person.name=='Massimo').name
    'Massimo'
    >>> row = db.person[2]
    >>> row.name == row['name'] == row['person.name'] == row('person.name')
    True
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name=\"Max\")
    <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of one 2 many relation

    >>> tmp = db.define_table('dog',\
              Field('name'),\
              Field('birth','date'),\
              Field('owner',db.person),\
              migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of many 2 many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
              Field('author_id', db.author),\
              Field('paper_id', db.paper),\
              migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
    45

    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """
################################################################################
# deprecated since the new DAL; here only for backward compatibility
################################################################################

# Legacy class names from the pre-DAL API.  The exact spelling of each alias
# is load-bearing: external code may still import any of them from gluon.dal.
SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field  # was necessary in gluon/globals.py session.connect
DAL.Table = Table  # was necessary in gluon/globals.py session.connect

################################################################################
# Geodal utils
################################################################################

def geoPoint(x, y):
    # Build the WKT (Well-Known Text) representation of a 2D point;
    # %f renders each coordinate with 6 decimal places.
    return "POINT (%f %f)" % (x, y)
10563
def geoLine(*line):
    """Build the WKT LINESTRING for a sequence of (x, y) coordinate pairs."""
    coords = ["%f %f" % pair for pair in line]
    return "LINESTRING (%s)" % ','.join(coords)
10566
def geoPolygon(*line):
    """Build the WKT POLYGON for a sequence of (x, y) boundary pairs."""
    coords = ["%f %f" % pair for pair in line]
    return "POLYGON ((%s))" % ','.join(coords)
10569 10570 ################################################################################ 10571 # run tests 10572 ################################################################################ 10573 10574 if __name__ == '__main__': 10575 import doctest 10576 doctest.testmod() 10577